From 0fea7c33f07d766e346eded1d228730913d40995 Mon Sep 17 00:00:00 2001
From: Anthonios Partheniou
Date: Fri, 3 Oct 2025 15:35:03 +0000
Subject: [PATCH] WIP test docs

---
 repositories.bzl | 2 +-
 .../asset/google/cloud/asset/__init__.py | 8 +
 .../asset/google/cloud/asset_v1/__init__.py | 8 +
 .../services/asset_service/async_client.py | 276 +-
 .../asset_v1/services/asset_service/client.py | 276 +-
 .../services/asset_service/transports/grpc.py | 75 +-
 .../asset_service/transports/grpc_asyncio.py | 75 +-
 .../google/cloud/asset_v1/types/__init__.py | 10 +
 .../types/asset_enrichment_resourceowners.py | 45 +
 .../cloud/asset_v1/types/asset_service.py | 507 +-
 .../google/cloud/asset_v1/types/assets.py | 284 +-
 .../google/cloud/eventarc_v1/__init__.py | 66 +
 .../cloud/eventarc_v1/gapic_metadata.json | 315 +
 .../services/eventarc/async_client.py | 2792 +-
 .../eventarc_v1/services/eventarc/client.py | 2825 +-
 .../eventarc_v1/services/eventarc/pagers.py | 699 +
 .../services/eventarc/transports/base.py | 298 +
 .../services/eventarc/transports/grpc.py | 558 +-
 .../eventarc/transports/grpc_asyncio.py | 663 +-
 .../services/eventarc/transports/rest.py | 4726 ++-
 .../services/eventarc/transports/rest_base.py | 960 +-
 .../cloud/eventarc_v1/types/__init__.py | 78 +
 .../google/cloud/eventarc_v1/types/channel.py | 15 +
 .../eventarc_v1/types/channel_connection.py | 7 +
 .../cloud/eventarc_v1/types/enrollment.py | 130 +
 .../cloud/eventarc_v1/types/eventarc.py | 992 +-
 .../eventarc_v1/types/google_api_source.py | 133 +
 .../types/google_channel_config.py | 10 +
 .../cloud/eventarc_v1/types/logging_config.py | 96 +
 .../cloud/eventarc_v1/types/message_bus.py | 127 +
 .../cloud/eventarc_v1/types/network_config.py | 48 +
 .../cloud/eventarc_v1/types/pipeline.py | 948 +
 .../google/cloud/eventarc_v1/types/trigger.py | 107 +-
 ...generated_eventarc_create_channel_async.py | 1 -
 ..._generated_eventarc_create_channel_sync.py | 1 -
 ...erated_eventarc_create_enrollment_async.py | 63 +
 ...nerated_eventarc_create_enrollment_sync.py | 63 +
 ...eventarc_create_google_api_source_async.py | 61 +
 ..._eventarc_create_google_api_source_sync.py | 61 +
 ...rated_eventarc_create_message_bus_async.py | 57 +
 ...erated_eventarc_create_message_bus_sync.py | 57 +
 ...enerated_eventarc_create_pipeline_async.py | 61 +
 ...generated_eventarc_create_pipeline_sync.py | 61 +
 ...generated_eventarc_create_trigger_async.py | 1 -
 ..._generated_eventarc_create_trigger_sync.py | 1 -
 ...generated_eventarc_delete_channel_async.py | 1 -
 ..._generated_eventarc_delete_channel_sync.py | 1 -
 ...erated_eventarc_delete_enrollment_async.py | 56 +
 ...nerated_eventarc_delete_enrollment_sync.py | 56 +
 ...eventarc_delete_google_api_source_async.py | 56 +
 ..._eventarc_delete_google_api_source_sync.py | 56 +
 ...rated_eventarc_delete_message_bus_async.py | 56 +
 ...erated_eventarc_delete_message_bus_sync.py | 56 +
 ...enerated_eventarc_delete_pipeline_async.py | 56 +
 ...generated_eventarc_delete_pipeline_sync.py | 56 +
 ...generated_eventarc_delete_trigger_async.py | 1 -
 ..._generated_eventarc_delete_trigger_sync.py | 1 -
 ...generated_eventarc_get_enrollment_async.py | 52 +
 ..._generated_eventarc_get_enrollment_sync.py | 52 +
 ...ed_eventarc_get_google_api_source_async.py | 52 +
 ...ted_eventarc_get_google_api_source_sync.py | 52 +
 ...enerated_eventarc_get_message_bus_async.py | 52 +
 ...generated_eventarc_get_message_bus_sync.py | 52 +
 ...1_generated_eventarc_get_pipeline_async.py | 52 +
 ...v1_generated_eventarc_get_pipeline_sync.py | 52 +
 ...nerated_eventarc_list_enrollments_async.py | 
53 + ...enerated_eventarc_list_enrollments_sync.py | 53 + ..._eventarc_list_google_api_sources_async.py | 53 + ...d_eventarc_list_google_api_sources_sync.py | 53 + ...tarc_list_message_bus_enrollments_async.py | 53 + ...ntarc_list_message_bus_enrollments_sync.py | 53 + ...rated_eventarc_list_message_buses_async.py | 53 + ...erated_eventarc_list_message_buses_sync.py | 53 + ...generated_eventarc_list_pipelines_async.py | 53 + ..._generated_eventarc_list_pipelines_sync.py | 53 + ...generated_eventarc_update_channel_async.py | 1 - ..._generated_eventarc_update_channel_sync.py | 1 - ...erated_eventarc_update_enrollment_async.py | 61 + ...nerated_eventarc_update_enrollment_sync.py | 61 + ...eventarc_update_google_api_source_async.py | 59 + ..._eventarc_update_google_api_source_sync.py | 59 + ...rated_eventarc_update_message_bus_async.py | 55 + ...erated_eventarc_update_message_bus_sync.py | 55 + ...enerated_eventarc_update_pipeline_async.py | 59 + ...generated_eventarc_update_pipeline_sync.py | 59 + ...generated_eventarc_update_trigger_async.py | 1 - ..._generated_eventarc_update_trigger_sync.py | 1 - ...pet_metadata_google.cloud.eventarc.v1.json | 4551 ++- .../scripts/fixup_eventarc_v1_keywords.py | 27 +- .../unit/gapic/eventarc_v1/test_eventarc.py | 23829 +++++++++++++--- 90 files changed, 43461 insertions(+), 5463 deletions(-) create mode 100755 tests/integration/goldens/asset/google/cloud/asset_v1/types/asset_enrichment_resourceowners.py create mode 100755 tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/enrollment.py create mode 100755 tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/google_api_source.py create mode 100755 tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/logging_config.py create mode 100755 tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/message_bus.py create mode 100755 tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/network_config.py create mode 100755 tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/pipeline.py create mode 100755 tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_create_enrollment_async.py create mode 100755 tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_create_enrollment_sync.py create mode 100755 tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_create_google_api_source_async.py create mode 100755 tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_create_google_api_source_sync.py create mode 100755 tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_create_message_bus_async.py create mode 100755 tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_create_message_bus_sync.py create mode 100755 tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_create_pipeline_async.py create mode 100755 tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_create_pipeline_sync.py create mode 100755 tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_delete_enrollment_async.py create mode 100755 tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_delete_enrollment_sync.py create mode 100755 
tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_delete_google_api_source_async.py create mode 100755 tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_delete_google_api_source_sync.py create mode 100755 tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_delete_message_bus_async.py create mode 100755 tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_delete_message_bus_sync.py create mode 100755 tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_delete_pipeline_async.py create mode 100755 tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_delete_pipeline_sync.py create mode 100755 tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_enrollment_async.py create mode 100755 tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_enrollment_sync.py create mode 100755 tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_google_api_source_async.py create mode 100755 tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_google_api_source_sync.py create mode 100755 tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_message_bus_async.py create mode 100755 tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_message_bus_sync.py create mode 100755 tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_pipeline_async.py create mode 100755 tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_pipeline_sync.py create mode 100755 tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_list_enrollments_async.py create mode 100755 tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_list_enrollments_sync.py create mode 100755 tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_list_google_api_sources_async.py create mode 100755 tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_list_google_api_sources_sync.py create mode 100755 tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_list_message_bus_enrollments_async.py create mode 100755 tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_list_message_bus_enrollments_sync.py create mode 100755 tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_list_message_buses_async.py create mode 100755 tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_list_message_buses_sync.py create mode 100755 tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_list_pipelines_async.py create mode 100755 tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_list_pipelines_sync.py create mode 100755 tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_update_enrollment_async.py create mode 100755 
tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_update_enrollment_sync.py create mode 100755 tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_update_google_api_source_async.py create mode 100755 tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_update_google_api_source_sync.py create mode 100755 tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_update_message_bus_async.py create mode 100755 tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_update_message_bus_sync.py create mode 100755 tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_update_pipeline_async.py create mode 100755 tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_update_pipeline_sync.py diff --git a/repositories.bzl b/repositories.bzl index 1816e77d3f..836ae8d992 100644 --- a/repositories.bzl +++ b/repositories.bzl @@ -25,7 +25,7 @@ def gapic_generator_python(): strip_prefix = "rules_gapic-%s" % _rules_gapic_version, urls = ["https://github.com/googleapis/rules_gapic/archive/v%s.tar.gz" % _rules_gapic_version], ) - _commit_sha = "fae3e6e091418d6343902debaf545cfc8f32c3ff" + _commit_sha = "9070e63a1f574261c153ef6e94afc55677200337" _maybe( http_archive, name = "com_google_googleapis", diff --git a/tests/integration/goldens/asset/google/cloud/asset/__init__.py b/tests/integration/goldens/asset/google/cloud/asset/__init__.py index fd9404f4b6..eff2c7f186 100755 --- a/tests/integration/goldens/asset/google/cloud/asset/__init__.py +++ b/tests/integration/goldens/asset/google/cloud/asset/__init__.py @@ -21,6 +21,7 @@ from google.cloud.asset_v1.services.asset_service.client import AssetServiceClient from google.cloud.asset_v1.services.asset_service.async_client import AssetServiceAsyncClient +from google.cloud.asset_v1.types.asset_enrichment_resourceowners import ResourceOwners from google.cloud.asset_v1.types.asset_service import AnalyzeIamPolicyLongrunningMetadata from google.cloud.asset_v1.types.asset_service import AnalyzeIamPolicyLongrunningRequest from google.cloud.asset_v1.types.asset_service import AnalyzeIamPolicyLongrunningResponse @@ -83,8 +84,10 @@ from google.cloud.asset_v1.types.asset_service import UpdateSavedQueryRequest from google.cloud.asset_v1.types.asset_service import ContentType from google.cloud.asset_v1.types.assets import Asset +from google.cloud.asset_v1.types.assets import AssetEnrichment from google.cloud.asset_v1.types.assets import AttachedResource from google.cloud.asset_v1.types.assets import ConditionEvaluation +from google.cloud.asset_v1.types.assets import EffectiveTagDetails from google.cloud.asset_v1.types.assets import IamPolicyAnalysisResult from google.cloud.asset_v1.types.assets import IamPolicyAnalysisState from google.cloud.asset_v1.types.assets import IamPolicySearchResult @@ -95,12 +98,14 @@ from google.cloud.asset_v1.types.assets import RelationshipAttributes from google.cloud.asset_v1.types.assets import Resource from google.cloud.asset_v1.types.assets import ResourceSearchResult +from google.cloud.asset_v1.types.assets import Tag from google.cloud.asset_v1.types.assets import TemporalAsset from google.cloud.asset_v1.types.assets import TimeWindow from google.cloud.asset_v1.types.assets import VersionedResource __all__ = ('AssetServiceClient', 'AssetServiceAsyncClient', + 'ResourceOwners', 
'AnalyzeIamPolicyLongrunningMetadata', 'AnalyzeIamPolicyLongrunningRequest', 'AnalyzeIamPolicyLongrunningResponse', @@ -163,8 +168,10 @@ 'UpdateSavedQueryRequest', 'ContentType', 'Asset', + 'AssetEnrichment', 'AttachedResource', 'ConditionEvaluation', + 'EffectiveTagDetails', 'IamPolicyAnalysisResult', 'IamPolicyAnalysisState', 'IamPolicySearchResult', @@ -175,6 +182,7 @@ 'RelationshipAttributes', 'Resource', 'ResourceSearchResult', + 'Tag', 'TemporalAsset', 'TimeWindow', 'VersionedResource', diff --git a/tests/integration/goldens/asset/google/cloud/asset_v1/__init__.py b/tests/integration/goldens/asset/google/cloud/asset_v1/__init__.py index 03ee9ec521..ef9147871d 100755 --- a/tests/integration/goldens/asset/google/cloud/asset_v1/__init__.py +++ b/tests/integration/goldens/asset/google/cloud/asset_v1/__init__.py @@ -21,6 +21,7 @@ from .services.asset_service import AssetServiceClient from .services.asset_service import AssetServiceAsyncClient +from .types.asset_enrichment_resourceowners import ResourceOwners from .types.asset_service import AnalyzeIamPolicyLongrunningMetadata from .types.asset_service import AnalyzeIamPolicyLongrunningRequest from .types.asset_service import AnalyzeIamPolicyLongrunningResponse @@ -83,8 +84,10 @@ from .types.asset_service import UpdateSavedQueryRequest from .types.asset_service import ContentType from .types.assets import Asset +from .types.assets import AssetEnrichment from .types.assets import AttachedResource from .types.assets import ConditionEvaluation +from .types.assets import EffectiveTagDetails from .types.assets import IamPolicyAnalysisResult from .types.assets import IamPolicyAnalysisState from .types.assets import IamPolicySearchResult @@ -95,6 +98,7 @@ from .types.assets import RelationshipAttributes from .types.assets import Resource from .types.assets import ResourceSearchResult +from .types.assets import Tag from .types.assets import TemporalAsset from .types.assets import TimeWindow from .types.assets import VersionedResource @@ -117,6 +121,7 @@ 'AnalyzerOrgPolicy', 'AnalyzerOrgPolicyConstraint', 'Asset', +'AssetEnrichment', 'AssetServiceClient', 'AttachedResource', 'BatchGetAssetsHistoryRequest', @@ -130,6 +135,7 @@ 'CreateSavedQueryRequest', 'DeleteFeedRequest', 'DeleteSavedQueryRequest', +'EffectiveTagDetails', 'ExportAssetsRequest', 'ExportAssetsResponse', 'Feed', @@ -166,6 +172,7 @@ 'RelatedResources', 'RelationshipAttributes', 'Resource', +'ResourceOwners', 'ResourceSearchResult', 'SavedQuery', 'SearchAllIamPoliciesRequest', @@ -174,6 +181,7 @@ 'SearchAllResourcesResponse', 'TableFieldSchema', 'TableSchema', +'Tag', 'TemporalAsset', 'TimeWindow', 'UpdateFeedRequest', diff --git a/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py b/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py index 1612a30704..cfec17f314 100755 --- a/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py +++ b/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py @@ -1254,78 +1254,117 @@ async def sample_search_all_resources(): * `name:Important` to find Google Cloud resources whose name contains - "Important" as a word. + `Important` as a word. * `name=Important` to find the Google Cloud resource whose name is exactly - "Important". + `Important`. 
* `displayName:Impor*` to find Google Cloud resources whose display name - contains "Impor" as a prefix of any word + contains `Impor` as a prefix of any word in the field. * `location:us-west*` to find Google Cloud resources whose - location contains both "us" and "west" + location contains both `us` and `west` as prefixes. * `labels:prod` to find Google Cloud resources whose labels - contain "prod" as a key or value. + contain `prod` as a key or value. * `labels.env:prod` to find Google Cloud - resources that have a label "env" and - its value is "prod". + resources that have a label `env` and + its value is `prod`. * `labels.env:*` to find Google Cloud - resources that have a label "env". * - `kmsKey:key` to find Google Cloud + resources that have a label `env`. * + `tagKeys:env` to find Google Cloud + resources that have directly attached + tags where the + [`TagKey.namespacedName`](https://cloud.google.com/resource-manager/reference/rest/v3/tagKeys#resource:-tagkey) + contains `env`. + * `tagValues:prod*` to find Google Cloud + resources that have directly attached + tags where the + [`TagValue.namespacedName`](https://cloud.google.com/resource-manager/reference/rest/v3/tagValues#resource:-tagvalue) + contains a word prefixed by `prod`. + * `tagValueIds=tagValues/123` to find + Google Cloud resources that have + directly attached tags where the + [`TagValue.name`](https://cloud.google.com/resource-manager/reference/rest/v3/tagValues#resource:-tagvalue) + is exactly `tagValues/123`. + * `effectiveTagKeys:env` to find Google + Cloud resources that have directly + attached or inherited tags where the + [`TagKey.namespacedName`](https://cloud.google.com/resource-manager/reference/rest/v3/tagKeys#resource:-tagkey) + contains `env`. + * `effectiveTagValues:prod*` to find + Google Cloud resources that have + directly attached or inherited tags + where the + [`TagValue.namespacedName`](https://cloud.google.com/resource-manager/reference/rest/v3/tagValues#resource:-tagvalue) + contains a word prefixed by `prod`. + * `effectiveTagValueIds=tagValues/123` + to find Google Cloud resources that + have directly attached or inherited tags + where the + [`TagValue.name`](https://cloud.google.com/resource-manager/reference/rest/v3/tagValues#resource:-tagvalue) + is exactly `tagValues/123`. + * `kmsKey:key` to find Google Cloud resources encrypted with a customer-managed encryption key whose - name contains "key" as a word. This - field is deprecated. Please use the - `kmsKeys` field to retrieve Cloud KMS - key information. + name contains `key` as a word. This + field is deprecated. Use the `kmsKeys` + field to retrieve Cloud KMS key + information. * `kmsKeys:key` to find Google Cloud resources encrypted with customer-managed encryption keys whose - name contains the word "key". * + name contains the word `key`. * `relationships:instance-group-1` to find Google Cloud resources that have - relationships with "instance-group-1" in + relationships with `instance-group-1` in the related resource name. * `relationships:INSTANCE_TO_INSTANCEGROUP` to find Compute Engine instances that have relationships of type - "INSTANCE_TO_INSTANCEGROUP". * + `INSTANCE_TO_INSTANCEGROUP`. * `relationships.INSTANCE_TO_INSTANCEGROUP:instance-group-1` to find Compute Engine instances that have relationships with - "instance-group-1" in the Compute + `instance-group-1` in the Compute Engine instance group resource name, for relationship type - "INSTANCE_TO_INSTANCEGROUP". + `INSTANCE_TO_INSTANCEGROUP`. 
+ * `sccSecurityMarks.key=value` to find + Cloud resources that are attached with + security marks whose key is `key` and + value is `value`. * + `sccSecurityMarks.key:*` to find Cloud + resources that are attached with + security marks whose key is `key`. * `state:ACTIVE` to find Google Cloud resources whose state contains - "ACTIVE" as a word. + `ACTIVE` as a word. * `NOT state:ACTIVE` to find Google Cloud resources whose state doesn't - contain "ACTIVE" as a word. + contain `ACTIVE` as a word. * `createTime<1609459200` to find Google Cloud resources that were created - before "2021-01-01 00:00:00 UTC". - 1609459200 is the epoch timestamp of - "2021-01-01 00:00:00 UTC" in seconds. + before `2021-01-01 00:00:00 UTC`. + `1609459200` is the epoch timestamp of + `2021-01-01 00:00:00 UTC` in seconds. * `updateTime>1609459200` to find Google Cloud resources that were updated - after "2021-01-01 00:00:00 UTC". - 1609459200 is the epoch timestamp of - "2021-01-01 00:00:00 UTC" in seconds. + after `2021-01-01 00:00:00 UTC`. + `1609459200` is the epoch timestamp of + `2021-01-01 00:00:00 UTC` in seconds. * `Important` to find Google Cloud - resources that contain "Important" as a + resources that contain `Important` as a word in any of the searchable fields. * `Impor*` to find Google Cloud - resources that contain "Impor" as a + resources that contain `Impor` as a prefix of any word in any of the searchable fields. * `Important location:(us-west1 OR global)` to find Google Cloud resources that contain - "Important" as a word in any of the + `Important` as a word in any of the searchable fields and are also located - in the "us-west1" region or the "global" + in the `us-west1` region or the `global` location. This corresponds to the ``query`` field @@ -1334,8 +1373,9 @@ async def sample_search_all_resources(): asset_types (:class:`MutableSequence[str]`): Optional. A list of asset types that this request searches for. If empty, it - will search all the [searchable asset - types](https://cloud.google.com/asset-inventory/docs/supported-asset-types#searchable_asset_types). + will search all the asset types + [supported by search + APIs](https://cloud.google.com/asset-inventory/docs/supported-asset-types). Regular expressions are also supported. For example: @@ -1962,8 +2002,8 @@ async def query_assets(self, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> asset_service.QueryAssetsResponse: r"""Issue a job that queries assets using a SQL statement - compatible with [BigQuery Standard - SQL](http://cloud/bigquery/docs/reference/standard-sql/enabling-standard-sql). + compatible with [BigQuery + SQL](https://cloud.google.com/bigquery/docs/introduction-sql). If the query execution finishes within timeout and there's no pagination, the full query results will be @@ -1974,9 +2014,9 @@ async def query_assets(self, previous `QueryAssets` call. Note, the query result has approximately 10 GB - limitation enforced by BigQuery - https://cloud.google.com/bigquery/docs/best-practices-performance-output, - queries return larger results will result in errors. + limitation enforced by + [BigQuery](https://cloud.google.com/bigquery/docs/best-practices-performance-output). + Queries return larger results will result in errors. .. code-block:: python @@ -2814,15 +2854,19 @@ async def sample_analyze_org_policies(): filter (:class:`str`): The expression to filter [AnalyzeOrgPoliciesResponse.org_policy_results][google.cloud.asset.v1.AnalyzeOrgPoliciesResponse.org_policy_results]. 
- The only supported field is - `consolidated_policy.attached_resource`, - and the only supported operator is `=`. + Filtering is currently available for + bare literal values and the following + fields: - Example: + * consolidated_policy.attached_resource + * consolidated_policy.rules.enforce + When filtering by a specific field, the + only supported operator is `=`. For + example, filtering by consolidated_policy.attached_resource="//cloudresourcemanager.googleapis.com/folders/001" - will return the org policy results - of"folders/001". + will return all the Organization Policy + results attached to "folders/001". This corresponds to the ``filter`` field on the ``request`` instance; if ``request`` is provided, this @@ -2974,15 +3018,20 @@ async def sample_analyze_org_policy_governed_containers(): on the ``request`` instance; if ``request`` is provided, this should not be set. filter (:class:`str`): - The expression to filter the governed - containers in result. The only supported - field is `parent`, and the only - supported operator is `=`. + The expression to filter + [AnalyzeOrgPolicyGovernedContainersResponse.governed_containers][google.cloud.asset.v1.AnalyzeOrgPolicyGovernedContainersResponse.governed_containers]. + Filtering is currently available for + bare literal values and the following + fields: - Example: + * parent + * consolidated_policy.rules.enforce + When filtering by a specific field, the + only supported operator is `=`. For + example, filtering by parent="//cloudresourcemanager.googleapis.com/folders/001" - will return all containers under + will return all the containers under "folders/001". This corresponds to the ``filter`` field @@ -3078,23 +3127,58 @@ async def analyze_org_policy_governed_assets(self, ) -> pagers.AnalyzeOrgPolicyGovernedAssetsAsyncPager: r"""Analyzes organization policies governed assets (Google Cloud resources or policies) under a scope. 
This - RPC supports custom constraints and the following 10 - canned constraints: - - * storage.uniformBucketLevelAccess - * iam.disableServiceAccountKeyCreation - * iam.allowedPolicyMemberDomains - * compute.vmExternalIpAccess - * appengine.enforceServiceAccountActAsCheck - * gcp.resourceLocations - * compute.trustedImageProjects - * compute.skipDefaultNetworkCreation - * compute.requireOsLogin - * compute.disableNestedVirtualization + RPC supports custom constraints and the following canned + constraints: + + * constraints/ainotebooks.accessMode + * constraints/ainotebooks.disableFileDownloads + * constraints/ainotebooks.disableRootAccess + * constraints/ainotebooks.disableTerminal + * constraints/ainotebooks.environmentOptions + * constraints/ainotebooks.requireAutoUpgradeSchedule * + constraints/ainotebooks.restrictVpcNetworks + * constraints/compute.disableGuestAttributesAccess * + constraints/compute.disableInstanceDataAccessApis * + constraints/compute.disableNestedVirtualization * + constraints/compute.disableSerialPortAccess + * constraints/compute.disableSerialPortLogging + * constraints/compute.disableVpcExternalIpv6 + * constraints/compute.requireOsLogin + * constraints/compute.requireShieldedVm + * + constraints/compute.restrictLoadBalancerCreationForTypes + * + constraints/compute.restrictProtocolForwardingCreationForTypes + * constraints/compute.restrictXpnProjectLienRemoval * + constraints/compute.setNewProjectDefaultToZonalDNSOnly * + constraints/compute.skipDefaultNetworkCreation * + constraints/compute.trustedImageProjects + * constraints/compute.vmCanIpForward + * constraints/compute.vmExternalIpAccess + * constraints/gcp.detailedAuditLoggingMode + * constraints/gcp.resourceLocations + * constraints/iam.allowedPolicyMemberDomains + * + constraints/iam.automaticIamGrantsForDefaultServiceAccounts + * constraints/iam.disableServiceAccountCreation + * constraints/iam.disableServiceAccountKeyCreation * + constraints/iam.disableServiceAccountKeyUpload * + constraints/iam.restrictCrossProjectServiceAccountLienRemoval + * constraints/iam.serviceAccountKeyExpiryHours + * constraints/resourcemanager.accessBoundaries + * constraints/resourcemanager.allowedExportDestinations + * constraints/sql.restrictAuthorizedNetworks + * + constraints/sql.restrictNoncompliantDiagnosticDataAccess + * constraints/sql.restrictNoncompliantResourceCreation * + constraints/sql.restrictPublicIp + * constraints/storage.publicAccessPrevention + * constraints/storage.restrictAuthTypes + * constraints/storage.uniformBucketLevelAccess This RPC only returns either resources of types - supported by [searchable asset - types](https://cloud.google.com/asset-inventory/docs/supported-asset-types#searchable_asset_types), + [supported by search + APIs](https://cloud.google.com/asset-inventory/docs/supported-asset-types) or IAM policies. .. code-block:: python @@ -3154,29 +3238,47 @@ async def sample_analyze_org_policy_governed_assets(): on the ``request`` instance; if ``request`` is provided, this should not be set. filter (:class:`str`): - The expression to filter the governed - assets in result. The only supported - fields for governed resources are - `governed_resource.project` and - `governed_resource.folders`. The only - supported fields for governed iam - policies are - `governed_iam_policy.project` and - `governed_iam_policy.folders`. The only - supported operator is `=`. 
- - Example 1: - governed_resource.project="projects/12345678" - filter will return all governed - resources under projects/12345678 - including the project ifself, if - applicable. - - Example 2: - governed_iam_policy.folders="folders/12345678" - filter will return all governed iam - policies under folders/12345678, if - applicable. + The expression to filter + [AnalyzeOrgPolicyGovernedAssetsResponse.governed_assets][google.cloud.asset.v1.AnalyzeOrgPolicyGovernedAssetsResponse.governed_assets]. + + For governed resources, filtering is + currently available for bare literal + values and the following fields: + + * governed_resource.project + * governed_resource.folders + * consolidated_policy.rules.enforce + When filtering by + `governed_resource.project` or + `consolidated_policy.rules.enforce`, the + only supported operator is `=`. When + filtering by + `governed_resource.folders`, the + supported operators are `=` and `:`. + For example, filtering by + `governed_resource.project="projects/12345678"` + will return all the governed resources + under "projects/12345678", including the + project itself if applicable. + + For governed IAM policies, filtering is + currently available for bare literal + values and the following fields: + + * governed_iam_policy.project + * governed_iam_policy.folders + * consolidated_policy.rules.enforce + When filtering by + `governed_iam_policy.project` or + `consolidated_policy.rules.enforce`, the + only supported operator is `=`. When + filtering by + `governed_iam_policy.folders`, the + supported operators are `=` and `:`. + For example, filtering by + `governed_iam_policy.folders:"folders/12345678"` + will return all the governed IAM + policies under "folders/001". This corresponds to the ``filter`` field on the ``request`` instance; if ``request`` is provided, this diff --git a/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py b/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py index c5f99f9c49..99fd849274 100755 --- a/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py +++ b/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py @@ -1651,78 +1651,117 @@ def sample_search_all_resources(): * `name:Important` to find Google Cloud resources whose name contains - "Important" as a word. + `Important` as a word. * `name=Important` to find the Google Cloud resource whose name is exactly - "Important". + `Important`. * `displayName:Impor*` to find Google Cloud resources whose display name - contains "Impor" as a prefix of any word + contains `Impor` as a prefix of any word in the field. * `location:us-west*` to find Google Cloud resources whose - location contains both "us" and "west" + location contains both `us` and `west` as prefixes. * `labels:prod` to find Google Cloud resources whose labels - contain "prod" as a key or value. + contain `prod` as a key or value. * `labels.env:prod` to find Google Cloud - resources that have a label "env" and - its value is "prod". + resources that have a label `env` and + its value is `prod`. * `labels.env:*` to find Google Cloud - resources that have a label "env". * - `kmsKey:key` to find Google Cloud + resources that have a label `env`. * + `tagKeys:env` to find Google Cloud + resources that have directly attached + tags where the + [`TagKey.namespacedName`](https://cloud.google.com/resource-manager/reference/rest/v3/tagKeys#resource:-tagkey) + contains `env`. 
+ * `tagValues:prod*` to find Google Cloud + resources that have directly attached + tags where the + [`TagValue.namespacedName`](https://cloud.google.com/resource-manager/reference/rest/v3/tagValues#resource:-tagvalue) + contains a word prefixed by `prod`. + * `tagValueIds=tagValues/123` to find + Google Cloud resources that have + directly attached tags where the + [`TagValue.name`](https://cloud.google.com/resource-manager/reference/rest/v3/tagValues#resource:-tagvalue) + is exactly `tagValues/123`. + * `effectiveTagKeys:env` to find Google + Cloud resources that have directly + attached or inherited tags where the + [`TagKey.namespacedName`](https://cloud.google.com/resource-manager/reference/rest/v3/tagKeys#resource:-tagkey) + contains `env`. + * `effectiveTagValues:prod*` to find + Google Cloud resources that have + directly attached or inherited tags + where the + [`TagValue.namespacedName`](https://cloud.google.com/resource-manager/reference/rest/v3/tagValues#resource:-tagvalue) + contains a word prefixed by `prod`. + * `effectiveTagValueIds=tagValues/123` + to find Google Cloud resources that + have directly attached or inherited tags + where the + [`TagValue.name`](https://cloud.google.com/resource-manager/reference/rest/v3/tagValues#resource:-tagvalue) + is exactly `tagValues/123`. + * `kmsKey:key` to find Google Cloud resources encrypted with a customer-managed encryption key whose - name contains "key" as a word. This - field is deprecated. Please use the - `kmsKeys` field to retrieve Cloud KMS - key information. + name contains `key` as a word. This + field is deprecated. Use the `kmsKeys` + field to retrieve Cloud KMS key + information. * `kmsKeys:key` to find Google Cloud resources encrypted with customer-managed encryption keys whose - name contains the word "key". * + name contains the word `key`. * `relationships:instance-group-1` to find Google Cloud resources that have - relationships with "instance-group-1" in + relationships with `instance-group-1` in the related resource name. * `relationships:INSTANCE_TO_INSTANCEGROUP` to find Compute Engine instances that have relationships of type - "INSTANCE_TO_INSTANCEGROUP". * + `INSTANCE_TO_INSTANCEGROUP`. * `relationships.INSTANCE_TO_INSTANCEGROUP:instance-group-1` to find Compute Engine instances that have relationships with - "instance-group-1" in the Compute + `instance-group-1` in the Compute Engine instance group resource name, for relationship type - "INSTANCE_TO_INSTANCEGROUP". + `INSTANCE_TO_INSTANCEGROUP`. + * `sccSecurityMarks.key=value` to find + Cloud resources that are attached with + security marks whose key is `key` and + value is `value`. * + `sccSecurityMarks.key:*` to find Cloud + resources that are attached with + security marks whose key is `key`. * `state:ACTIVE` to find Google Cloud resources whose state contains - "ACTIVE" as a word. + `ACTIVE` as a word. * `NOT state:ACTIVE` to find Google Cloud resources whose state doesn't - contain "ACTIVE" as a word. + contain `ACTIVE` as a word. * `createTime<1609459200` to find Google Cloud resources that were created - before "2021-01-01 00:00:00 UTC". - 1609459200 is the epoch timestamp of - "2021-01-01 00:00:00 UTC" in seconds. + before `2021-01-01 00:00:00 UTC`. + `1609459200` is the epoch timestamp of + `2021-01-01 00:00:00 UTC` in seconds. * `updateTime>1609459200` to find Google Cloud resources that were updated - after "2021-01-01 00:00:00 UTC". - 1609459200 is the epoch timestamp of - "2021-01-01 00:00:00 UTC" in seconds. 
+ after `2021-01-01 00:00:00 UTC`. + `1609459200` is the epoch timestamp of + `2021-01-01 00:00:00 UTC` in seconds. * `Important` to find Google Cloud - resources that contain "Important" as a + resources that contain `Important` as a word in any of the searchable fields. * `Impor*` to find Google Cloud - resources that contain "Impor" as a + resources that contain `Impor` as a prefix of any word in any of the searchable fields. * `Important location:(us-west1 OR global)` to find Google Cloud resources that contain - "Important" as a word in any of the + `Important` as a word in any of the searchable fields and are also located - in the "us-west1" region or the "global" + in the `us-west1` region or the `global` location. This corresponds to the ``query`` field @@ -1731,8 +1770,9 @@ def sample_search_all_resources(): asset_types (MutableSequence[str]): Optional. A list of asset types that this request searches for. If empty, it - will search all the [searchable asset - types](https://cloud.google.com/asset-inventory/docs/supported-asset-types#searchable_asset_types). + will search all the asset types + [supported by search + APIs](https://cloud.google.com/asset-inventory/docs/supported-asset-types). Regular expressions are also supported. For example: @@ -2357,8 +2397,8 @@ def query_assets(self, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> asset_service.QueryAssetsResponse: r"""Issue a job that queries assets using a SQL statement - compatible with [BigQuery Standard - SQL](http://cloud/bigquery/docs/reference/standard-sql/enabling-standard-sql). + compatible with [BigQuery + SQL](https://cloud.google.com/bigquery/docs/introduction-sql). If the query execution finishes within timeout and there's no pagination, the full query results will be @@ -2369,9 +2409,9 @@ def query_assets(self, previous `QueryAssets` call. Note, the query result has approximately 10 GB - limitation enforced by BigQuery - https://cloud.google.com/bigquery/docs/best-practices-performance-output, - queries return larger results will result in errors. + limitation enforced by + [BigQuery](https://cloud.google.com/bigquery/docs/best-practices-performance-output). + Queries return larger results will result in errors. .. code-block:: python @@ -3204,15 +3244,19 @@ def sample_analyze_org_policies(): filter (str): The expression to filter [AnalyzeOrgPoliciesResponse.org_policy_results][google.cloud.asset.v1.AnalyzeOrgPoliciesResponse.org_policy_results]. - The only supported field is - `consolidated_policy.attached_resource`, - and the only supported operator is `=`. + Filtering is currently available for + bare literal values and the following + fields: - Example: + * consolidated_policy.attached_resource + * consolidated_policy.rules.enforce + When filtering by a specific field, the + only supported operator is `=`. For + example, filtering by consolidated_policy.attached_resource="//cloudresourcemanager.googleapis.com/folders/001" - will return the org policy results - of"folders/001". + will return all the Organization Policy + results attached to "folders/001". This corresponds to the ``filter`` field on the ``request`` instance; if ``request`` is provided, this @@ -3363,15 +3407,20 @@ def sample_analyze_org_policy_governed_containers(): on the ``request`` instance; if ``request`` is provided, this should not be set. filter (str): - The expression to filter the governed - containers in result. The only supported - field is `parent`, and the only - supported operator is `=`. 
+ The expression to filter + [AnalyzeOrgPolicyGovernedContainersResponse.governed_containers][google.cloud.asset.v1.AnalyzeOrgPolicyGovernedContainersResponse.governed_containers]. + Filtering is currently available for + bare literal values and the following + fields: - Example: + * parent + * consolidated_policy.rules.enforce + When filtering by a specific field, the + only supported operator is `=`. For + example, filtering by parent="//cloudresourcemanager.googleapis.com/folders/001" - will return all containers under + will return all the containers under "folders/001". This corresponds to the ``filter`` field @@ -3466,23 +3515,58 @@ def analyze_org_policy_governed_assets(self, ) -> pagers.AnalyzeOrgPolicyGovernedAssetsPager: r"""Analyzes organization policies governed assets (Google Cloud resources or policies) under a scope. This - RPC supports custom constraints and the following 10 - canned constraints: - - * storage.uniformBucketLevelAccess - * iam.disableServiceAccountKeyCreation - * iam.allowedPolicyMemberDomains - * compute.vmExternalIpAccess - * appengine.enforceServiceAccountActAsCheck - * gcp.resourceLocations - * compute.trustedImageProjects - * compute.skipDefaultNetworkCreation - * compute.requireOsLogin - * compute.disableNestedVirtualization + RPC supports custom constraints and the following canned + constraints: + + * constraints/ainotebooks.accessMode + * constraints/ainotebooks.disableFileDownloads + * constraints/ainotebooks.disableRootAccess + * constraints/ainotebooks.disableTerminal + * constraints/ainotebooks.environmentOptions + * constraints/ainotebooks.requireAutoUpgradeSchedule * + constraints/ainotebooks.restrictVpcNetworks + * constraints/compute.disableGuestAttributesAccess * + constraints/compute.disableInstanceDataAccessApis * + constraints/compute.disableNestedVirtualization * + constraints/compute.disableSerialPortAccess + * constraints/compute.disableSerialPortLogging + * constraints/compute.disableVpcExternalIpv6 + * constraints/compute.requireOsLogin + * constraints/compute.requireShieldedVm + * + constraints/compute.restrictLoadBalancerCreationForTypes + * + constraints/compute.restrictProtocolForwardingCreationForTypes + * constraints/compute.restrictXpnProjectLienRemoval * + constraints/compute.setNewProjectDefaultToZonalDNSOnly * + constraints/compute.skipDefaultNetworkCreation * + constraints/compute.trustedImageProjects + * constraints/compute.vmCanIpForward + * constraints/compute.vmExternalIpAccess + * constraints/gcp.detailedAuditLoggingMode + * constraints/gcp.resourceLocations + * constraints/iam.allowedPolicyMemberDomains + * + constraints/iam.automaticIamGrantsForDefaultServiceAccounts + * constraints/iam.disableServiceAccountCreation + * constraints/iam.disableServiceAccountKeyCreation * + constraints/iam.disableServiceAccountKeyUpload * + constraints/iam.restrictCrossProjectServiceAccountLienRemoval + * constraints/iam.serviceAccountKeyExpiryHours + * constraints/resourcemanager.accessBoundaries + * constraints/resourcemanager.allowedExportDestinations + * constraints/sql.restrictAuthorizedNetworks + * + constraints/sql.restrictNoncompliantDiagnosticDataAccess + * constraints/sql.restrictNoncompliantResourceCreation * + constraints/sql.restrictPublicIp + * constraints/storage.publicAccessPrevention + * constraints/storage.restrictAuthTypes + * constraints/storage.uniformBucketLevelAccess This RPC only returns either resources of types - supported by [searchable asset - 
types](https://cloud.google.com/asset-inventory/docs/supported-asset-types#searchable_asset_types), + [supported by search + APIs](https://cloud.google.com/asset-inventory/docs/supported-asset-types) or IAM policies. .. code-block:: python @@ -3542,29 +3626,47 @@ def sample_analyze_org_policy_governed_assets(): on the ``request`` instance; if ``request`` is provided, this should not be set. filter (str): - The expression to filter the governed - assets in result. The only supported - fields for governed resources are - `governed_resource.project` and - `governed_resource.folders`. The only - supported fields for governed iam - policies are - `governed_iam_policy.project` and - `governed_iam_policy.folders`. The only - supported operator is `=`. - - Example 1: - governed_resource.project="projects/12345678" - filter will return all governed - resources under projects/12345678 - including the project ifself, if - applicable. - - Example 2: - governed_iam_policy.folders="folders/12345678" - filter will return all governed iam - policies under folders/12345678, if - applicable. + The expression to filter + [AnalyzeOrgPolicyGovernedAssetsResponse.governed_assets][google.cloud.asset.v1.AnalyzeOrgPolicyGovernedAssetsResponse.governed_assets]. + + For governed resources, filtering is + currently available for bare literal + values and the following fields: + + * governed_resource.project + * governed_resource.folders + * consolidated_policy.rules.enforce + When filtering by + `governed_resource.project` or + `consolidated_policy.rules.enforce`, the + only supported operator is `=`. When + filtering by + `governed_resource.folders`, the + supported operators are `=` and `:`. + For example, filtering by + `governed_resource.project="projects/12345678"` + will return all the governed resources + under "projects/12345678", including the + project itself if applicable. + + For governed IAM policies, filtering is + currently available for bare literal + values and the following fields: + + * governed_iam_policy.project + * governed_iam_policy.folders + * consolidated_policy.rules.enforce + When filtering by + `governed_iam_policy.project` or + `consolidated_policy.rules.enforce`, the + only supported operator is `=`. When + filtering by + `governed_iam_policy.folders`, the + supported operators are `=` and `:`. + For example, filtering by + `governed_iam_policy.folders:"folders/12345678"` + will return all the governed IAM + policies under "folders/001". This corresponds to the ``filter`` field on the ``request`` instance; if ``request`` is provided, this diff --git a/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc.py b/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc.py index 6c8103049d..8b6fed7efc 100755 --- a/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc.py +++ b/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc.py @@ -726,8 +726,8 @@ def query_assets(self) -> Callable[ r"""Return a callable for the query assets method over gRPC. Issue a job that queries assets using a SQL statement - compatible with [BigQuery Standard - SQL](http://cloud/bigquery/docs/reference/standard-sql/enabling-standard-sql). + compatible with [BigQuery + SQL](https://cloud.google.com/bigquery/docs/introduction-sql). 
If the query execution finishes within timeout and there's no pagination, the full query results will be @@ -738,9 +738,9 @@ def query_assets(self) -> Callable[ previous `QueryAssets` call. Note, the query result has approximately 10 GB - limitation enforced by BigQuery - https://cloud.google.com/bigquery/docs/best-practices-performance-output, - queries return larger results will result in errors. + limitation enforced by + [BigQuery](https://cloud.google.com/bigquery/docs/best-practices-performance-output). + Queries return larger results will result in errors. Returns: Callable[[~.QueryAssetsRequest], @@ -982,23 +982,58 @@ def analyze_org_policy_governed_assets(self) -> Callable[ Analyzes organization policies governed assets (Google Cloud resources or policies) under a scope. This - RPC supports custom constraints and the following 10 - canned constraints: - - * storage.uniformBucketLevelAccess - * iam.disableServiceAccountKeyCreation - * iam.allowedPolicyMemberDomains - * compute.vmExternalIpAccess - * appengine.enforceServiceAccountActAsCheck - * gcp.resourceLocations - * compute.trustedImageProjects - * compute.skipDefaultNetworkCreation - * compute.requireOsLogin - * compute.disableNestedVirtualization + RPC supports custom constraints and the following canned + constraints: + + * constraints/ainotebooks.accessMode + * constraints/ainotebooks.disableFileDownloads + * constraints/ainotebooks.disableRootAccess + * constraints/ainotebooks.disableTerminal + * constraints/ainotebooks.environmentOptions + * constraints/ainotebooks.requireAutoUpgradeSchedule * + constraints/ainotebooks.restrictVpcNetworks + * constraints/compute.disableGuestAttributesAccess * + constraints/compute.disableInstanceDataAccessApis * + constraints/compute.disableNestedVirtualization * + constraints/compute.disableSerialPortAccess + * constraints/compute.disableSerialPortLogging + * constraints/compute.disableVpcExternalIpv6 + * constraints/compute.requireOsLogin + * constraints/compute.requireShieldedVm + * + constraints/compute.restrictLoadBalancerCreationForTypes + * + constraints/compute.restrictProtocolForwardingCreationForTypes + * constraints/compute.restrictXpnProjectLienRemoval * + constraints/compute.setNewProjectDefaultToZonalDNSOnly * + constraints/compute.skipDefaultNetworkCreation * + constraints/compute.trustedImageProjects + * constraints/compute.vmCanIpForward + * constraints/compute.vmExternalIpAccess + * constraints/gcp.detailedAuditLoggingMode + * constraints/gcp.resourceLocations + * constraints/iam.allowedPolicyMemberDomains + * + constraints/iam.automaticIamGrantsForDefaultServiceAccounts + * constraints/iam.disableServiceAccountCreation + * constraints/iam.disableServiceAccountKeyCreation * + constraints/iam.disableServiceAccountKeyUpload * + constraints/iam.restrictCrossProjectServiceAccountLienRemoval + * constraints/iam.serviceAccountKeyExpiryHours + * constraints/resourcemanager.accessBoundaries + * constraints/resourcemanager.allowedExportDestinations + * constraints/sql.restrictAuthorizedNetworks + * + constraints/sql.restrictNoncompliantDiagnosticDataAccess + * constraints/sql.restrictNoncompliantResourceCreation * + constraints/sql.restrictPublicIp + * constraints/storage.publicAccessPrevention + * constraints/storage.restrictAuthTypes + * constraints/storage.uniformBucketLevelAccess This RPC only returns either resources of types - supported by [searchable asset - types](https://cloud.google.com/asset-inventory/docs/supported-asset-types#searchable_asset_types), + [supported by 
search + APIs](https://cloud.google.com/asset-inventory/docs/supported-asset-types) or IAM policies. Returns: diff --git a/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc_asyncio.py b/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc_asyncio.py index bcbff86868..e8dbe9bc1a 100755 --- a/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc_asyncio.py +++ b/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc_asyncio.py @@ -732,8 +732,8 @@ def query_assets(self) -> Callable[ r"""Return a callable for the query assets method over gRPC. Issue a job that queries assets using a SQL statement - compatible with [BigQuery Standard - SQL](http://cloud/bigquery/docs/reference/standard-sql/enabling-standard-sql). + compatible with [BigQuery + SQL](https://cloud.google.com/bigquery/docs/introduction-sql). If the query execution finishes within timeout and there's no pagination, the full query results will be @@ -744,9 +744,9 @@ def query_assets(self) -> Callable[ previous `QueryAssets` call. Note, the query result has approximately 10 GB - limitation enforced by BigQuery - https://cloud.google.com/bigquery/docs/best-practices-performance-output, - queries return larger results will result in errors. + limitation enforced by + [BigQuery](https://cloud.google.com/bigquery/docs/best-practices-performance-output). + Queries return larger results will result in errors. Returns: Callable[[~.QueryAssetsRequest], @@ -988,23 +988,58 @@ def analyze_org_policy_governed_assets(self) -> Callable[ Analyzes organization policies governed assets (Google Cloud resources or policies) under a scope. This - RPC supports custom constraints and the following 10 - canned constraints: - - * storage.uniformBucketLevelAccess - * iam.disableServiceAccountKeyCreation - * iam.allowedPolicyMemberDomains - * compute.vmExternalIpAccess - * appengine.enforceServiceAccountActAsCheck - * gcp.resourceLocations - * compute.trustedImageProjects - * compute.skipDefaultNetworkCreation - * compute.requireOsLogin - * compute.disableNestedVirtualization + RPC supports custom constraints and the following canned + constraints: + + * constraints/ainotebooks.accessMode + * constraints/ainotebooks.disableFileDownloads + * constraints/ainotebooks.disableRootAccess + * constraints/ainotebooks.disableTerminal + * constraints/ainotebooks.environmentOptions + * constraints/ainotebooks.requireAutoUpgradeSchedule * + constraints/ainotebooks.restrictVpcNetworks + * constraints/compute.disableGuestAttributesAccess * + constraints/compute.disableInstanceDataAccessApis * + constraints/compute.disableNestedVirtualization * + constraints/compute.disableSerialPortAccess + * constraints/compute.disableSerialPortLogging + * constraints/compute.disableVpcExternalIpv6 + * constraints/compute.requireOsLogin + * constraints/compute.requireShieldedVm + * + constraints/compute.restrictLoadBalancerCreationForTypes + * + constraints/compute.restrictProtocolForwardingCreationForTypes + * constraints/compute.restrictXpnProjectLienRemoval * + constraints/compute.setNewProjectDefaultToZonalDNSOnly * + constraints/compute.skipDefaultNetworkCreation * + constraints/compute.trustedImageProjects + * constraints/compute.vmCanIpForward + * constraints/compute.vmExternalIpAccess + * constraints/gcp.detailedAuditLoggingMode + * constraints/gcp.resourceLocations + * constraints/iam.allowedPolicyMemberDomains + * + 
constraints/iam.automaticIamGrantsForDefaultServiceAccounts + * constraints/iam.disableServiceAccountCreation + * constraints/iam.disableServiceAccountKeyCreation * + constraints/iam.disableServiceAccountKeyUpload * + constraints/iam.restrictCrossProjectServiceAccountLienRemoval + * constraints/iam.serviceAccountKeyExpiryHours + * constraints/resourcemanager.accessBoundaries + * constraints/resourcemanager.allowedExportDestinations + * constraints/sql.restrictAuthorizedNetworks + * + constraints/sql.restrictNoncompliantDiagnosticDataAccess + * constraints/sql.restrictNoncompliantResourceCreation * + constraints/sql.restrictPublicIp + * constraints/storage.publicAccessPrevention + * constraints/storage.restrictAuthTypes + * constraints/storage.uniformBucketLevelAccess This RPC only returns either resources of types - supported by [searchable asset - types](https://cloud.google.com/asset-inventory/docs/supported-asset-types#searchable_asset_types), + [supported by search + APIs](https://cloud.google.com/asset-inventory/docs/supported-asset-types) or IAM policies. Returns: diff --git a/tests/integration/goldens/asset/google/cloud/asset_v1/types/__init__.py b/tests/integration/goldens/asset/google/cloud/asset_v1/types/__init__.py index d8a9b7f910..43be2cc267 100755 --- a/tests/integration/goldens/asset/google/cloud/asset_v1/types/__init__.py +++ b/tests/integration/goldens/asset/google/cloud/asset_v1/types/__init__.py @@ -13,6 +13,9 @@ # See the License for the specific language governing permissions and # limitations under the License. # +from .asset_enrichment_resourceowners import ( + ResourceOwners, +) from .asset_service import ( AnalyzeIamPolicyLongrunningMetadata, AnalyzeIamPolicyLongrunningRequest, @@ -78,8 +81,10 @@ ) from .assets import ( Asset, + AssetEnrichment, AttachedResource, ConditionEvaluation, + EffectiveTagDetails, IamPolicyAnalysisResult, IamPolicyAnalysisState, IamPolicySearchResult, @@ -90,12 +95,14 @@ RelationshipAttributes, Resource, ResourceSearchResult, + Tag, TemporalAsset, TimeWindow, VersionedResource, ) __all__ = ( + 'ResourceOwners', 'AnalyzeIamPolicyLongrunningMetadata', 'AnalyzeIamPolicyLongrunningRequest', 'AnalyzeIamPolicyLongrunningResponse', @@ -158,8 +165,10 @@ 'UpdateSavedQueryRequest', 'ContentType', 'Asset', + 'AssetEnrichment', 'AttachedResource', 'ConditionEvaluation', + 'EffectiveTagDetails', 'IamPolicyAnalysisResult', 'IamPolicyAnalysisState', 'IamPolicySearchResult', @@ -170,6 +179,7 @@ 'RelationshipAttributes', 'Resource', 'ResourceSearchResult', + 'Tag', 'TemporalAsset', 'TimeWindow', 'VersionedResource', diff --git a/tests/integration/goldens/asset/google/cloud/asset_v1/types/asset_enrichment_resourceowners.py b/tests/integration/goldens/asset/google/cloud/asset_v1/types/asset_enrichment_resourceowners.py new file mode 100755 index 0000000000..b9e022e509 --- /dev/null +++ b/tests/integration/goldens/asset/google/cloud/asset_v1/types/asset_enrichment_resourceowners.py @@ -0,0 +1,45 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + + +__protobuf__ = proto.module( + package='google.cloud.asset.v1', + manifest={ + 'ResourceOwners', + }, +) + + +class ResourceOwners(proto.Message): + r"""The resource owners information. + + Attributes: + resource_owners (MutableSequence[str]): + List of resource owners. + """ + + resource_owners: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=1, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/tests/integration/goldens/asset/google/cloud/asset_v1/types/asset_service.py b/tests/integration/goldens/asset/google/cloud/asset_v1/types/asset_service.py index a2a39db101..76ea3c9260 100755 --- a/tests/integration/goldens/asset/google/cloud/asset_v1/types/asset_service.py +++ b/tests/integration/goldens/asset/google/cloud/asset_v1/types/asset_service.py @@ -957,7 +957,7 @@ class PartitionKey(proto.Enum): READ_TIME (1): The time when the snapshot is taken. If specified as partition key, the result table(s) - is partitoned by the additional timestamp + is partitioned by the additional timestamp column, readTime. If [read_time] in ExportAssetsRequest is specified, the readTime column's value will be the same as it. @@ -966,10 +966,10 @@ class PartitionKey(proto.Enum): REQUEST_TIME (2): The time when the request is received and started to be processed. If specified as - partition key, the result table(s) is partitoned - by the requestTime column, an additional - timestamp column representing when the request - was received. + partition key, the result table(s) is + partitioned by the requestTime column, an + additional timestamp column representing when + the request was received. """ PARTITION_KEY_UNSPECIFIED = 0 READ_TIME = 1 @@ -1174,72 +1174,109 @@ class SearchAllResourcesRequest(proto.Message): Examples: * `name:Important` to find Google Cloud - resources whose name contains "Important" as a + resources whose name contains `Important` as a word. * `name=Important` to find the Google Cloud - resource whose name is exactly "Important". + resource whose name is exactly `Important`. * `displayName:Impor*` to find Google Cloud - resources whose display name contains "Impor" + resources whose display name contains `Impor` as a prefix of any word in the field. * `location:us-west*` to find Google Cloud - resources whose location contains both "us" - and "west" as prefixes. + resources whose location contains both `us` + and `west` as prefixes. * `labels:prod` to find Google Cloud resources - whose labels contain "prod" as a key or value. + whose labels contain `prod` as a key or value. * `labels.env:prod` to find Google Cloud - resources that have a label "env" and its - value is "prod". + resources that have a label `env` and its + value is `prod`. * `labels.env:*` to find Google Cloud resources - that have a label "env". * `kmsKey:key` to find - Google Cloud resources encrypted with a - customer-managed encryption key whose name - contains "key" as a word. This field is - deprecated. Please use the `kmsKeys` field to + that have a label `env`. * `tagKeys:env` to find + Google Cloud resources that have directly + attached tags where the + [`TagKey.namespacedName`](https://cloud.google.com/resource-manager/reference/rest/v3/tagKeys#resource:-tagkey) + contains `env`. 
+ * `tagValues:prod*` to find Google Cloud + resources that have directly attached tags + where the + [`TagValue.namespacedName`](https://cloud.google.com/resource-manager/reference/rest/v3/tagValues#resource:-tagvalue) + contains a word prefixed by `prod`. + * `tagValueIds=tagValues/123` to find Google + Cloud resources that have directly attached + tags where the + [`TagValue.name`](https://cloud.google.com/resource-manager/reference/rest/v3/tagValues#resource:-tagvalue) + is exactly `tagValues/123`. + * `effectiveTagKeys:env` to find Google Cloud + resources that have directly attached or + inherited tags where the + [`TagKey.namespacedName`](https://cloud.google.com/resource-manager/reference/rest/v3/tagKeys#resource:-tagkey) + contains `env`. + * `effectiveTagValues:prod*` to find Google + Cloud resources that have directly attached or + inherited tags where the + [`TagValue.namespacedName`](https://cloud.google.com/resource-manager/reference/rest/v3/tagValues#resource:-tagvalue) + contains a word prefixed by `prod`. + * `effectiveTagValueIds=tagValues/123` to find + Google Cloud resources that have directly + attached or inherited tags where the + [`TagValue.name`](https://cloud.google.com/resource-manager/reference/rest/v3/tagValues#resource:-tagvalue) + is exactly `tagValues/123`. + * `kmsKey:key` to find Google Cloud resources + encrypted with a customer-managed encryption + key whose name contains `key` as a word. This + field is deprecated. Use the `kmsKeys` field to retrieve Cloud KMS key information. * `kmsKeys:key` to find Google Cloud resources encrypted with customer-managed encryption - keys whose name contains the word "key". * + keys whose name contains the word `key`. * `relationships:instance-group-1` to find Google Cloud resources that have relationships with - "instance-group-1" in the related resource name. + `instance-group-1` in the related resource name. * `relationships:INSTANCE_TO_INSTANCEGROUP` to find Compute Engine instances that have relationships of type - "INSTANCE_TO_INSTANCEGROUP". * + `INSTANCE_TO_INSTANCEGROUP`. * `relationships.INSTANCE_TO_INSTANCEGROUP:instance-group-1` to find Compute Engine instances that have - relationships with "instance-group-1" in the + relationships with `instance-group-1` in the Compute Engine instance group resource name, for - relationship type "INSTANCE_TO_INSTANCEGROUP". + relationship type `INSTANCE_TO_INSTANCEGROUP`. + * `sccSecurityMarks.key=value` to find Cloud + resources that are attached with security + marks whose key is `key` and value is `value`. * + `sccSecurityMarks.key:*` to find Cloud resources + that are attached with security marks whose + key is `key`. * `state:ACTIVE` to find Google Cloud resources - whose state contains "ACTIVE" as a word. + whose state contains `ACTIVE` as a word. * `NOT state:ACTIVE` to find Google Cloud - resources whose state doesn't contain "ACTIVE" + resources whose state doesn't contain `ACTIVE` as a word. * `createTime<1609459200` to find Google Cloud - resources that were created before "2021-01-01 - 00:00:00 UTC". 1609459200 is the epoch timestamp - of "2021-01-01 00:00:00 UTC" in seconds. + resources that were created before `2021-01-01 + 00:00:00 UTC`. `1609459200` is the epoch + timestamp of `2021-01-01 00:00:00 UTC` in + seconds. * `updateTime>1609459200` to find Google Cloud - resources that were updated after "2021-01-01 - 00:00:00 UTC". 1609459200 is the epoch timestamp - of "2021-01-01 00:00:00 UTC" in seconds. 
+ resources that were updated after `2021-01-01 + 00:00:00 UTC`. `1609459200` is the epoch + timestamp of `2021-01-01 00:00:00 UTC` in + seconds. * `Important` to find Google Cloud resources - that contain "Important" as a word in any of + that contain `Important` as a word in any of the searchable fields. * `Impor*` to find Google Cloud resources that - contain "Impor" as a prefix of any word in any + contain `Impor` as a prefix of any word in any of the searchable fields. * `Important location:(us-west1 OR global)` to find Google - Cloud resources that contain "Important" as a + Cloud resources that contain `Important` as a word in any of the searchable fields and are - also located in the "us-west1" region or the - "global" location. + also located in the `us-west1` region or the + `global` location. asset_types (MutableSequence[str]): Optional. A list of asset types that this request searches for. If empty, it will search - all the [searchable asset - types](https://cloud.google.com/asset-inventory/docs/supported-asset-types#searchable_asset_types). + all the asset types [supported by search + APIs](https://cloud.google.com/asset-inventory/docs/supported-asset-types). Regular expressions are also supported. For example: @@ -1261,11 +1298,12 @@ class SearchAllResourcesRequest(proto.Message): page_size (int): Optional. The page size for search result pagination. Page size is capped at 500 even if a - larger value is given. If set to zero, server - will pick an appropriate default. Returned - results may be fewer than requested. When this - happens, there could be more results as long as - `next_page_token` is returned. + larger value is given. If set to zero or a + negative value, server will pick an appropriate + default. Returned results may be fewer than + requested. When this happens, there could be + more results as long as `next_page_token` is + returned. page_token (str): Optional. If present, then retrieve the next batch of results from the preceding call to this @@ -1280,8 +1318,8 @@ class SearchAllResourcesRequest(proto.Message): the field name to indicate descending order. Redundant space characters are ignored. Example: "location DESC, name". - Only singular primitive fields in the response - are sortable: + Only the following fields in the response are + sortable: * name * assetType @@ -1294,51 +1332,43 @@ class SearchAllResourcesRequest(proto.Message): * state * parentFullResourceName * parentAssetType - - All the other fields such as repeated fields - (e.g., `networkTags`, `kmsKeys`), map fields - (e.g., `labels`) and struct fields (e.g., - `additionalAttributes`) are not supported. read_mask (google.protobuf.field_mask_pb2.FieldMask): Optional. A comma-separated list of fields - specifying which fields to be returned in - ResourceSearchResult. Only '*' or combination of - top level fields can be specified. Field names - of both snake_case and camelCase are supported. - Examples: `"*"`, `"name,location"`, - `"name,versionedResources"`. - - The read_mask paths must be valid field paths - listed but not limited to (both snake_case and - camelCase are supported): - - * name - * assetType - * project - * displayName - * description - * location - * tagKeys - * tagValues - * tagValueIds - * labels - * networkTags - * kmsKey (This field is deprecated. Please use - the `kmsKeys` field to retrieve Cloud KMS - key information.) 
- * kmsKeys - * createTime - * updateTime - * state - * additionalAttributes - * versionedResources - - If read_mask is not specified, all fields except - versionedResources will be returned. - If only '*' is specified, all fields including - versionedResources will be returned. - Any invalid field path will trigger - INVALID_ARGUMENT error. + that you want returned in the results. The + following fields are returned by default if not + specified: + + * `name` + * `assetType` + * `project` + * `folders` + * `organization` + * `displayName` + * `description` + * `location` + * `labels` + * `tags` + * `effectiveTags` + * `networkTags` + * `kmsKeys` + * `createTime` + * `updateTime` + * `state` + * `additionalAttributes` + * `parentFullResourceName` + * `parentAssetType` + + Some fields of large size, such as + `versionedResources`, `attachedResources`, + `effectiveTags` etc., are not returned by + default, but you can specify them in the + `read_mask` parameter if you want to include + them. If `"*"` is specified, all [available + fields](https://cloud.google.com/asset-inventory/docs/reference/rest/v1/TopLevel/searchAllResources#resourcesearchresult) + are returned. + Examples: `"name,location"`, + `"name,versionedResources"`, `"*"`. Any invalid + field path will trigger INVALID_ARGUMENT error. """ scope: str = proto.Field( @@ -1485,11 +1515,12 @@ class SearchAllIamPoliciesRequest(proto.Message): page_size (int): Optional. The page size for search result pagination. Page size is capped at 500 even if a - larger value is given. If set to zero, server - will pick an appropriate default. Returned - results may be fewer than requested. When this - happens, there could be more results as long as - `next_page_token` is returned. + larger value is given. If set to zero or a + negative value, server will pick an appropriate + default. Returned results may be fewer than + requested. When this happens, there could be + more results as long as `next_page_token` is + returned. page_token (str): Optional. If present, retrieve the next batch of results from the preceding call to this @@ -1501,8 +1532,8 @@ class SearchAllIamPoliciesRequest(proto.Message): Optional. A list of asset types that the IAM policies are attached to. If empty, it will search the IAM policies that are attached to all - the [searchable asset - types](https://cloud.google.com/asset-inventory/docs/supported-asset-types#searchable_asset_types). + the asset types [supported by search + APIs](https://cloud.google.com/asset-inventory/docs/supported-asset-types) Regular expressions are also supported. For example: @@ -1611,10 +1642,10 @@ class IamPolicyAnalysisQuery(proto.Message): "projects/my-project-id"), or a project number (such as "projects/12345"). - To know how to get organization id, visit [here + To know how to get organization ID, visit [here ](https://cloud.google.com/resource-manager/docs/creating-managing-organization#retrieving_your_organization_id). - To know how to get folder or project id, visit + To know how to get folder or project ID, visit [here ](https://cloud.google.com/resource-manager/docs/creating-managing-folders#viewing_or_listing_folders_and_projects). resource_selector (google.cloud.asset_v1.types.IamPolicyAnalysisQuery.ResourceSelector): @@ -1923,7 +1954,7 @@ class AnalyzeIamPolicyRequest(proto.Message): be merged together with the `saved_analysis_query` as base and the `analysis_query` as overrides. 
For more details - of the merge behavior, please refer to the + of the merge behavior, refer to the [MergeFrom](https://developers.google.com/protocol-buffers/docs/reference/cpp/google.protobuf.message#Message.MergeFrom.details) page. @@ -1974,7 +2005,7 @@ class AnalyzeIamPolicyResponse(proto.Message): request. service_account_impersonation_analysis (MutableSequence[google.cloud.asset_v1.types.AnalyzeIamPolicyResponse.IamPolicyAnalysis]): The service account impersonation analysis if - [AnalyzeIamPolicyRequest.analyze_service_account_impersonation][] + [IamPolicyAnalysisQuery.Options.analyze_service_account_impersonation][google.cloud.asset.v1.IamPolicyAnalysisQuery.Options.analyze_service_account_impersonation] is enabled. fully_explored (bool): Represents whether all entries in the @@ -2146,7 +2177,7 @@ class PartitionKey(proto.Enum): REQUEST_TIME (1): The time when the request is received. If specified as partition key, the result table(s) - is partitoned by the RequestTime column, an + is partitioned by the RequestTime column, an additional timestamp column representing when the request was received. """ @@ -2207,7 +2238,7 @@ class AnalyzeIamPolicyLongrunningRequest(proto.Message): be merged together with the `saved_analysis_query` as base and the `analysis_query` as overrides. For more details - of the merge behavior, please refer to the + of the merge behavior, refer to the [MergeFrom](https://developers.google.com/protocol-buffers/docs/reference/cpp/google.protobuf.message#Message.MergeFrom.details) doc. @@ -2563,8 +2594,8 @@ class AnalyzeMoveRequest(proto.Message): organization to reparent the target resource. The analysis will be performed against hypothetically moving the resource to this - specified desitination parent. This can only be - a folder number (such as "folders/123") or an + specified destination parent. This can only be a + folder number (such as "folders/123") or an organization number (such as "organizations/123"). view (google.cloud.asset_v1.types.AnalyzeMoveRequest.AnalysisView): @@ -2793,8 +2824,8 @@ class QueryAssetsRequest(proto.Message): returned. statement (str): Optional. A SQL statement that's compatible - with [BigQuery Standard - SQL](http://cloud/bigquery/docs/reference/standard-sql/enabling-standard-sql). + with [BigQuery + SQL](https://cloud.google.com/bigquery/docs/introduction-sql). This field is a member of `oneof`_ ``query``. job_reference (str): @@ -2927,11 +2958,13 @@ class QueryAssetsResponse(proto.Message): The query response, which can be either an `error` or a valid `response`. If `done` == `false` and the query result is - being saved in a output, the output_config field - will be set. + being saved in an output, the output_config + field will be set. If `done` == `true`, exactly one of `error`, `query_result` or `output_config` will - be set. + be set. [done] is unset unless the + [QueryAssetsResponse] contains a + [QueryAssetsResponse.job_reference]. error (google.rpc.status_pb2.Status): Error status. @@ -2941,10 +2974,10 @@ class QueryAssetsResponse(proto.Message): This field is a member of `oneof`_ ``response``. output_config (google.cloud.asset_v1.types.QueryAssetsOutputConfig): - Output configuration which indicates instead - of being returned in API response on the fly, - the query result will be saved in a specific - output. + Output configuration, which indicates that + instead of being returned in an API response on + the fly, the query result will be saved in a + specific output. This field is a member of `oneof`_ ``response``. 
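A minimal sketch of exercising the QueryAssets surface described above, assuming default credentials; the parent scope, SQL statement, and table name are placeholders rather than values taken from this patch:

```
from google.cloud import asset_v1

# Minimal QueryAssets sketch; the scope and SQL statement are illustrative only.
client = asset_v1.AssetServiceClient()

request = asset_v1.QueryAssetsRequest(
    parent="projects/my-project",
    statement="SELECT name, asset_type FROM compute_googleapis_com_Instance",
)

response = client.query_assets(request=request)

if response.done:
    # The query finished within the timeout; rows are returned inline.
    for row in response.query_result.rows:
        print(row)
else:
    # Otherwise, poll with the returned job_reference in a follow-up request.
    followup = asset_v1.QueryAssetsRequest(
        parent="projects/my-project",
        job_reference=response.job_reference,
    )
    response = client.query_assets(request=followup)
```

Per the limit noted above, queries whose results exceed roughly 10 GB will fail, so large result sets should be directed to an output configuration instead.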
""" @@ -3106,18 +3139,18 @@ class BatchGetEffectiveIamPoliciesRequest(proto.Message): "projects/my-project-id"), or a project number (such as "projects/12345"). - To know how to get organization id, visit [here + To know how to get organization ID, visit [here ](https://cloud.google.com/resource-manager/docs/creating-managing-organization#retrieving_your_organization_id). - To know how to get folder or project id, visit + To know how to get folder or project ID, visit [here ](https://cloud.google.com/resource-manager/docs/creating-managing-folders#viewing_or_listing_folders_and_projects). names (MutableSequence[str]): Required. The names refer to the [full_resource_names] (https://cloud.google.com/asset-inventory/docs/resource-name-format) - of [searchable asset - types](https://cloud.google.com/asset-inventory/docs/supported-asset-types#searchable_asset_types). + of the asset types [supported by search + APIs](https://cloud.google.com/asset-inventory/docs/supported-asset-types). A maximum of 20 resources' effective policies can be retrieved in a batch. """ @@ -3271,7 +3304,11 @@ class AnalyzerOrgPolicy(proto.Message): """ class Rule(proto.Message): - r"""Represents a rule defined in an organization policy + r"""This rule message is a customized version of the one defined + in the Organization Policy system. In addition to the fields + defined in the original organization policy, it contains + additional field(s) under specific circumstances to support + analysis results. This message has `oneof`_ fields (mutually exclusive fields). For each oneof, at most one member field can be set at the same time. @@ -3282,9 +3319,9 @@ class Rule(proto.Message): Attributes: values (google.cloud.asset_v1.types.AnalyzerOrgPolicy.Rule.StringValues): - List of values to be used for this - PolicyRule. This field can be set only in - Policies for list constraints. + List of values to be used for this policy + rule. This field can be set only in policies for + list constraints. This field is a member of `oneof`_ ``kind``. allow_all (bool): @@ -3308,6 +3345,22 @@ class Rule(proto.Message): This field is a member of `oneof`_ ``kind``. condition (google.type.expr_pb2.Expr): The evaluating condition for this rule. + condition_evaluation (google.cloud.asset_v1.types.ConditionEvaluation): + The condition evaluation result for this + rule. Only populated if it meets all the + following criteria: + + * There is a + [condition][google.cloud.asset.v1.AnalyzerOrgPolicy.Rule.condition] + defined for this rule. + * This rule is within + [AnalyzeOrgPolicyGovernedContainersResponse.GovernedContainer.consolidated_policy][google.cloud.asset.v1.AnalyzeOrgPolicyGovernedContainersResponse.GovernedContainer.consolidated_policy], + or + [AnalyzeOrgPolicyGovernedAssetsResponse.GovernedAsset.consolidated_policy][google.cloud.asset.v1.AnalyzeOrgPolicyGovernedAssetsResponse.GovernedAsset.consolidated_policy] + when the + [AnalyzeOrgPolicyGovernedAssetsResponse.GovernedAsset][google.cloud.asset.v1.AnalyzeOrgPolicyGovernedAssetsResponse.GovernedAsset] + has + [AnalyzeOrgPolicyGovernedAssetsResponse.GovernedAsset.governed_resource][google.cloud.asset.v1.AnalyzeOrgPolicyGovernedAssetsResponse.GovernedAsset.governed_resource]. 
""" class StringValues(proto.Message): @@ -3355,6 +3408,11 @@ class StringValues(proto.Message): number=7, message=expr_pb2.Expr, ) + condition_evaluation: gca_assets.ConditionEvaluation = proto.Field( + proto.MESSAGE, + number=8, + message=gca_assets.ConditionEvaluation, + ) attached_resource: str = proto.Field( proto.STRING, @@ -3585,11 +3643,19 @@ class MethodType(proto.Enum): DELETE (3): Constraint applied when deleting the resource. + REMOVE_GRANT (4): + Constraint applied when removing an IAM + grant. + GOVERN_TAGS (5): + Constraint applied when enforcing forced + tagging. """ METHOD_TYPE_UNSPECIFIED = 0 CREATE = 1 UPDATE = 2 DELETE = 3 + REMOVE_GRANT = 4 + GOVERN_TAGS = 5 class ActionType(proto.Enum): r"""Allow or deny type. @@ -3674,15 +3740,18 @@ class AnalyzeOrgPoliciesRequest(proto.Message): filter (str): The expression to filter [AnalyzeOrgPoliciesResponse.org_policy_results][google.cloud.asset.v1.AnalyzeOrgPoliciesResponse.org_policy_results]. - The only supported field is - `consolidated_policy.attached_resource`, and the - only supported operator is `=`. + Filtering is currently available for bare + literal values and the following fields: - Example: + * consolidated_policy.attached_resource + * consolidated_policy.rules.enforce + When filtering by a specific field, the only + supported operator is `=`. For example, + filtering by consolidated_policy.attached_resource="//cloudresourcemanager.googleapis.com/folders/001" - will return the org policy results - of"folders/001". + will return all the Organization Policy results + attached to "folders/001". page_size (int): The maximum number of items to return per page. If unspecified, @@ -3744,18 +3813,37 @@ class OrgPolicyResult(proto.Message): The consolidated organization policy for the analyzed resource. The consolidated organization policy is computed by merging and evaluating - [AnalyzeOrgPoliciesResponse.policy_bundle][]. + [policy_bundle][google.cloud.asset.v1.AnalyzeOrgPoliciesResponse.OrgPolicyResult.policy_bundle]. The evaluation will respect the organization policy [hierarchy rules](https://cloud.google.com/resource-manager/docs/organization-policy/understanding-hierarchy). policy_bundle (MutableSequence[google.cloud.asset_v1.types.AnalyzerOrgPolicy]): The ordered list of all organization policies from the - [AnalyzeOrgPoliciesResponse.OrgPolicyResult.consolidated_policy.attached_resource][]. + [consolidated_policy.attached_resource][google.cloud.asset.v1.AnalyzerOrgPolicy.attached_resource]. to the scope specified in the request. If the constraint is defined with default policy, it will also appear in the list. + project (str): + The project that this consolidated policy + belongs to, in the format of + projects/{PROJECT_NUMBER}. This field is + available when the consolidated policy belongs + to a project. + folders (MutableSequence[str]): + The folder(s) that this consolidated policy + belongs to, in the format of + folders/{FOLDER_NUMBER}. This field is available + when the consolidated policy belongs (directly + or cascadingly) to one or more folders. + organization (str): + The organization that this consolidated + policy belongs to, in the format of + organizations/{ORGANIZATION_NUMBER}. This field + is available when the consolidated policy + belongs (directly or cascadingly) to an + organization. 
""" consolidated_policy: 'AnalyzerOrgPolicy' = proto.Field( @@ -3768,6 +3856,18 @@ class OrgPolicyResult(proto.Message): number=2, message='AnalyzerOrgPolicy', ) + project: str = proto.Field( + proto.STRING, + number=3, + ) + folders: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=4, + ) + organization: str = proto.Field( + proto.STRING, + number=5, + ) @property def raw_page(self): @@ -3812,15 +3912,20 @@ class AnalyzeOrgPolicyGovernedContainersRequest(proto.Message): only contains organization policies for the provided constraint. filter (str): - The expression to filter the governed - containers in result. The only supported field - is `parent`, and the only supported operator is - `=`. + The expression to filter + [AnalyzeOrgPolicyGovernedContainersResponse.governed_containers][google.cloud.asset.v1.AnalyzeOrgPolicyGovernedContainersResponse.governed_containers]. + Filtering is currently available for bare + literal values and the following fields: - Example: + * parent + * consolidated_policy.rules.enforce + When filtering by a specific field, the only + supported operator is `=`. For example, + filtering by parent="//cloudresourcemanager.googleapis.com/folders/001" - will return all containers under "folders/001". + will return all the containers under + "folders/001". page_size (int): The maximum number of items to return per page. If unspecified, @@ -3897,11 +4002,30 @@ class GovernedContainer(proto.Message): policy_bundle (MutableSequence[google.cloud.asset_v1.types.AnalyzerOrgPolicy]): The ordered list of all organization policies from the - [AnalyzeOrgPoliciesResponse.OrgPolicyResult.consolidated_policy.attached_resource][]. + [consolidated_policy.attached_resource][google.cloud.asset.v1.AnalyzerOrgPolicy.attached_resource]. to the scope specified in the request. If the constraint is defined with default policy, it will also appear in the list. + project (str): + The project that this resource belongs to, in + the format of projects/{PROJECT_NUMBER}. This + field is available when the resource belongs to + a project. + folders (MutableSequence[str]): + The folder(s) that this resource belongs to, + in the format of folders/{FOLDER_NUMBER}. This + field is available when the resource belongs + (directly or cascadingly) to one or more + folders. + organization (str): + The organization that this resource belongs + to, in the format of + organizations/{ORGANIZATION_NUMBER}. This field + is available when the resource belongs (directly + or cascadingly) to an organization. + effective_tags (MutableSequence[google.cloud.asset_v1.types.EffectiveTagDetails]): + The effective tags on this resource. """ full_resource_name: str = proto.Field( @@ -3922,6 +4046,23 @@ class GovernedContainer(proto.Message): number=4, message='AnalyzerOrgPolicy', ) + project: str = proto.Field( + proto.STRING, + number=5, + ) + folders: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=6, + ) + organization: str = proto.Field( + proto.STRING, + number=7, + ) + effective_tags: MutableSequence[gca_assets.EffectiveTagDetails] = proto.RepeatedField( + proto.MESSAGE, + number=8, + message=gca_assets.EffectiveTagDetails, + ) @property def raw_page(self): @@ -3966,26 +4107,43 @@ class AnalyzeOrgPolicyGovernedAssetsRequest(proto.Message): contains analyzed organization policies for the provided constraint. filter (str): - The expression to filter the governed assets - in result. 
The only supported fields for - governed resources are - `governed_resource.project` and - `governed_resource.folders`. The only supported - fields for governed iam policies are - `governed_iam_policy.project` and - `governed_iam_policy.folders`. The only - supported operator is `=`. - - Example 1: - governed_resource.project="projects/12345678" - filter will return all governed resources under - projects/12345678 including the project ifself, - if applicable. - - Example 2: - governed_iam_policy.folders="folders/12345678" - filter will return all governed iam policies - under folders/12345678, if applicable. + The expression to filter + [AnalyzeOrgPolicyGovernedAssetsResponse.governed_assets][google.cloud.asset.v1.AnalyzeOrgPolicyGovernedAssetsResponse.governed_assets]. + + For governed resources, filtering is currently + available for bare literal values and the + following fields: + + * governed_resource.project + * governed_resource.folders + * consolidated_policy.rules.enforce + When filtering by `governed_resource.project` or + `consolidated_policy.rules.enforce`, the only + supported operator is `=`. When filtering by + `governed_resource.folders`, the supported + operators are `=` and `:`. + For example, filtering by + `governed_resource.project="projects/12345678"` + will return all the governed resources under + "projects/12345678", including the project + itself if applicable. + + For governed IAM policies, filtering is + currently available for bare literal values and + the following fields: + + * governed_iam_policy.project + * governed_iam_policy.folders + * consolidated_policy.rules.enforce + When filtering by `governed_iam_policy.project` + or `consolidated_policy.rules.enforce`, the only + supported operator is `=`. When filtering by + `governed_iam_policy.folders`, the supported + operators are `=` and `:`. + For example, filtering by + `governed_iam_policy.folders:"folders/12345678"` + will return all the governed IAM policies under + "folders/001". page_size (int): The maximum number of items to return per page. If unspecified, @@ -4068,6 +4226,17 @@ class GovernedResource(proto.Message): organizations/{ORGANIZATION_NUMBER}. This field is available when the resource belongs (directly or cascadingly) to an organization. + asset_type (str): + The asset type of the + [AnalyzeOrgPolicyGovernedAssetsResponse.GovernedResource.full_resource_name][google.cloud.asset.v1.AnalyzeOrgPolicyGovernedAssetsResponse.GovernedResource.full_resource_name] + Example: + + `cloudresourcemanager.googleapis.com/Project` + See [Cloud Asset Inventory Supported Asset + Types](https://cloud.google.com/asset-inventory/docs/supported-asset-types) + for all supported asset types. + effective_tags (MutableSequence[google.cloud.asset_v1.types.EffectiveTagDetails]): + The effective tags on this resource. """ full_resource_name: str = proto.Field( @@ -4090,6 +4259,15 @@ class GovernedResource(proto.Message): proto.STRING, number=7, ) + asset_type: str = proto.Field( + proto.STRING, + number=8, + ) + effective_tags: MutableSequence[gca_assets.EffectiveTagDetails] = proto.RepeatedField( + proto.MESSAGE, + number=9, + message=gca_assets.EffectiveTagDetails, + ) class GovernedIamPolicy(proto.Message): r"""The IAM policies governed by the organization policies of the @@ -4097,8 +4275,8 @@ class GovernedIamPolicy(proto.Message): Attributes: attached_resource (str): - The full resource name of the resource - associated with this IAM policy. 
Example: + The full resource name of the resource on + which this IAM policy is set. Example: `//compute.googleapis.com/projects/my_project_123/zones/zone1/instances/instance1`. See [Cloud Asset Inventory Resource Name @@ -4124,6 +4302,15 @@ class GovernedIamPolicy(proto.Message): organizations/{ORGANIZATION_NUMBER}. This field is available when the IAM policy belongs (directly or cascadingly) to an organization. + asset_type (str): + The asset type of the + [AnalyzeOrgPolicyGovernedAssetsResponse.GovernedIamPolicy.attached_resource][google.cloud.asset.v1.AnalyzeOrgPolicyGovernedAssetsResponse.GovernedIamPolicy.attached_resource]. + Example: + + `cloudresourcemanager.googleapis.com/Project` + See [Cloud Asset Inventory Supported Asset + Types](https://cloud.google.com/asset-inventory/docs/supported-asset-types) + for all supported asset types. """ attached_resource: str = proto.Field( @@ -4147,6 +4334,10 @@ class GovernedIamPolicy(proto.Message): proto.STRING, number=7, ) + asset_type: str = proto.Field( + proto.STRING, + number=8, + ) class GovernedAsset(proto.Message): r"""Represents a Google Cloud asset(resource or IAM policy) @@ -4184,7 +4375,7 @@ class GovernedAsset(proto.Message): policy_bundle (MutableSequence[google.cloud.asset_v1.types.AnalyzerOrgPolicy]): The ordered list of all organization policies from the - [AnalyzeOrgPoliciesResponse.OrgPolicyResult.consolidated_policy.attached_resource][] + [consolidated_policy.attached_resource][google.cloud.asset.v1.AnalyzerOrgPolicy.attached_resource] to the scope specified in the request. If the constraint is defined with default diff --git a/tests/integration/goldens/asset/google/cloud/asset_v1/types/assets.py b/tests/integration/goldens/asset/google/cloud/asset_v1/types/assets.py index b2755be6aa..071bc9d4a3 100755 --- a/tests/integration/goldens/asset/google/cloud/asset_v1/types/assets.py +++ b/tests/integration/goldens/asset/google/cloud/asset_v1/types/assets.py @@ -19,6 +19,7 @@ import proto # type: ignore +from google.cloud.asset_v1.types import asset_enrichment_resourceowners from google.cloud.orgpolicy.v1 import orgpolicy_pb2 # type: ignore from google.cloud.osconfig.v1 import inventory_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore @@ -35,11 +36,14 @@ manifest={ 'TemporalAsset', 'TimeWindow', + 'AssetEnrichment', 'Asset', 'Resource', 'RelatedAssets', 'RelationshipAttributes', 'RelatedAsset', + 'Tag', + 'EffectiveTagDetails', 'ResourceSearchResult', 'VersionedResource', 'AttachedResource', @@ -145,6 +149,30 @@ class TimeWindow(proto.Message): ) +class AssetEnrichment(proto.Message): + r"""The enhanced metadata information for a resource. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + resource_owners (google.cloud.asset_v1.types.ResourceOwners): + The resource owners for a resource. + + Note that this field only contains the members + that have "roles/owner" role in the resource's + IAM Policy. + + This field is a member of `oneof`_ ``EnrichmentData``. + """ + + resource_owners: asset_enrichment_resourceowners.ResourceOwners = proto.Field( + proto.MESSAGE, + number=7, + oneof='EnrichmentData', + message=asset_enrichment_resourceowners.ResourceOwners, + ) + + class Asset(proto.Message): r"""An asset in Google Cloud. An asset can be any resource in the Google Cloud [resource @@ -205,18 +233,17 @@ class Asset(proto.Message): with different constraints set on a given resource. 
access_policy (google.identity.accesscontextmanager.v1.access_policy_pb2.AccessPolicy): - Please also refer to the [access policy user + Also refer to the [access policy user guide](https://cloud.google.com/access-context-manager/docs/overview#access-policies). This field is a member of `oneof`_ ``access_context_policy``. access_level (google.identity.accesscontextmanager.v1.access_level_pb2.AccessLevel): - Please also refer to the [access level user + Also refer to the [access level user guide](https://cloud.google.com/access-context-manager/docs/overview#access-levels). This field is a member of `oneof`_ ``access_context_policy``. service_perimeter (google.identity.accesscontextmanager.v1.service_perimeter_pb2.ServicePerimeter): - Please also refer to the [service perimeter - user + Also refer to the [service perimeter user guide](https://cloud.google.com/vpc-service-controls/docs/overview). This field is a member of `oneof`_ ``access_context_policy``. @@ -358,9 +385,6 @@ class Resource(proto.Message): Example: `//cloudresourcemanager.googleapis.com/projects/my_project_123` - - For third-party assets, this field may be set - differently. data (google.protobuf.struct_pb2.Struct): The content of the resource, in which some sensitive fields are removed and may not be @@ -527,9 +551,103 @@ class RelatedAsset(proto.Message): ) +class Tag(proto.Message): + r"""The key and value for a + [tag](https://cloud.google.com/resource-manager/docs/tags/tags-overview). + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + tag_key (str): + TagKey namespaced name, in the format of + {ORG_ID}/{TAG_KEY_SHORT_NAME}. + + This field is a member of `oneof`_ ``_tag_key``. + tag_key_id (str): + TagKey ID, in the format of + tagKeys/{TAG_KEY_ID}. + + This field is a member of `oneof`_ ``_tag_key_id``. + tag_value (str): + TagValue namespaced name, in the format of + {ORG_ID}/{TAG_KEY_SHORT_NAME}/{TAG_VALUE_SHORT_NAME}. + + This field is a member of `oneof`_ ``_tag_value``. + tag_value_id (str): + TagValue ID, in the format of + tagValues/{TAG_VALUE_ID}. + + This field is a member of `oneof`_ ``_tag_value_id``. + """ + + tag_key: str = proto.Field( + proto.STRING, + number=1, + optional=True, + ) + tag_key_id: str = proto.Field( + proto.STRING, + number=2, + optional=True, + ) + tag_value: str = proto.Field( + proto.STRING, + number=3, + optional=True, + ) + tag_value_id: str = proto.Field( + proto.STRING, + number=4, + optional=True, + ) + + +class EffectiveTagDetails(proto.Message): + r"""The effective tags and the ancestor resources from which they + were inherited. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + attached_resource (str): + The [full resource + name](https://cloud.google.com/asset-inventory/docs/resource-name-format) + of the ancestor from which + [effective_tags][google.cloud.asset.v1.EffectiveTagDetails.effective_tags] + are inherited, according to [tag + inheritance](https://cloud.google.com/resource-manager/docs/tags/tags-overview#inheritance). + + This field is a member of `oneof`_ ``_attached_resource``. + effective_tags (MutableSequence[google.cloud.asset_v1.types.Tag]): + The effective tags inherited from the + [attached_resource][google.cloud.asset.v1.EffectiveTagDetails.attached_resource]. + Note that tags with the same key but different + values may attach to resources at a different + hierarchy levels. 
The lower hierarchy tag value + will overwrite the higher hierarchy tag value of + the same tag key. In this case, the tag value at + the higher hierarchy level will be removed. For + more information, see [tag + inheritance](https://cloud.google.com/resource-manager/docs/tags/tags-overview#inheritance). + """ + + attached_resource: str = proto.Field( + proto.STRING, + number=1, + optional=True, + ) + effective_tags: MutableSequence['Tag'] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message='Tag', + ) + + class ResourceSearchResult(proto.Message): r"""A result of Resource Search, containing information of a - cloud resource. Next ID: 32 + cloud resource. Attributes: name (str): @@ -621,8 +739,8 @@ class ResourceSearchResult(proto.Message): `location:us-west*` * Use a free text query. Example: `us-west*` labels (MutableMapping[str, str]): - Labels associated with this resource. See - [Labelling and grouping Google Cloud + User labels associated with this resource. + See [Labelling and grouping Google Cloud resources](https://cloud.google.com/blog/products/gcp/labelling-and-grouping-your-google-cloud-platform-resources) for more information. This field is available only when the resource's Protobuf contains it. @@ -660,11 +778,11 @@ class ResourceSearchResult(proto.Message): name. This field only presents for the purpose of - backward compatibility. Please use the - `kms_keys` field to retrieve Cloud KMS key - information. This field is available only when - the resource's Protobuf contains it and will - only be populated for [these resource + backward compatibility. Use the `kms_keys` field + to retrieve Cloud KMS key information. This + field is available only when the resource's + Protobuf contains it and will only be populated + for [these resource types](https://cloud.google.com/asset-inventory/docs/legacy-field-names#resource_types_with_the_to_be_deprecated_kmskey_field) for backward compatible purposes. @@ -755,7 +873,7 @@ class ResourceSearchResult(proto.Message): provided by the corresponding Google Cloud service (e.g., Compute Engine). see [API references and supported searchable - attributes](https://cloud.google.com/asset-inventory/docs/supported-asset-types#searchable_asset_types) + attributes](https://cloud.google.com/asset-inventory/docs/supported-asset-types) to see which fields are included. You can search values of these fields through @@ -814,6 +932,10 @@ class ResourceSearchResult(proto.Message): relationship types](https://cloud.google.com/asset-inventory/docs/supported-asset-types#supported_relationship_types). tag_keys (MutableSequence[str]): + This field is only present for the purpose of + backward compatibility. Use the `tags` field + instead. + TagKey namespaced names, in the format of {ORG_ID}/{TAG_KEY_SHORT_NAME}. To search against the `tagKeys`: @@ -828,6 +950,10 @@ class ResourceSearchResult(proto.Message): - `env` tag_values (MutableSequence[str]): + This field is only present for the purpose of + backward compatibility. Use the `tags` field + instead. + TagValue namespaced names, in the format of {ORG_ID}/{TAG_KEY_SHORT_NAME}/{TAG_VALUE_SHORT_NAME}. To search against the `tagValues`: @@ -843,18 +969,90 @@ class ResourceSearchResult(proto.Message): - `prod` tag_value_ids (MutableSequence[str]): + This field is only present for the purpose of + backward compatibility. Use the `tags` field + instead. + TagValue IDs, in the format of tagValues/{TAG_VALUE_ID}. To search against the `tagValueIds`: * Use a field query. 
Example: - - `tagValueIds:"456"` - `tagValueIds="tagValues/456"` * Use a free text query. Example: - `456` + tags (MutableSequence[google.cloud.asset_v1.types.Tag]): + The tags directly attached to this resource. + + To search against the `tags`: + + * Use a field query. Example: + + - `tagKeys:"123456789/env*"` + - `tagKeys="123456789/env"` + - `tagKeys:"env"` + - `tagKeyIds="tagKeys/123"` + - `tagValues:"env"` + - `tagValues:"env/prod"` + - `tagValues:"123456789/env/prod*"` + - `tagValues="123456789/env/prod"` + - `tagValueIds="tagValues/456"` + + * Use a free text query. Example: + + - `env/prod` + effective_tags (MutableSequence[google.cloud.asset_v1.types.EffectiveTagDetails]): + The effective tags on this resource. All of + the tags that are both attached to and inherited + by a resource are collectively called the + effective tags. For more information, see [tag + inheritance](https://cloud.google.com/resource-manager/docs/tags/tags-overview#inheritance). + + To search against the `effective_tags`: + + * Use a field query. Example: + + - `effectiveTagKeys:"123456789/env*"` + - `effectiveTagKeys="123456789/env"` + - `effectiveTagKeys:"env"` + - `effectiveTagKeyIds="tagKeys/123"` + - `effectiveTagValues:"env"` + - `effectiveTagValues:"env/prod"` + - `effectiveTagValues:"123456789/env/prod*"` + - `effectiveTagValues="123456789/env/prod"` + - `effectiveTagValueIds="tagValues/456"` + enrichments (MutableSequence[google.cloud.asset_v1.types.AssetEnrichment]): + Enrichments of the asset. Currently supported + enrichment types with SearchAllResources API: + + * RESOURCE_OWNERS + + The corresponding read masks in order to get the + enrichment: + + * enrichments.resource_owners + + The corresponding required permissions: + + * + cloudasset.assets.searchEnrichmentResourceOwners + + Example query to get resource owner enrichment: + + ``` + scope: "projects/my-project" + query: "name: my-project" + assetTypes: + "cloudresourcemanager.googleapis.com/Project" + readMask: { + paths: "asset_type" + paths: "name" + paths: "enrichments.resource_owners" + } + ``` parent_asset_type (str): The type of this resource's immediate parent, if there is one. @@ -866,6 +1064,18 @@ class ResourceSearchResult(proto.Message): * Use a free text query. Example: `cloudresourcemanager.googleapis.com/Project` + scc_security_marks (MutableMapping[str, str]): + The actual content of Security Command Center + security marks associated with the asset. + + To search against SCC SecurityMarks field: + + * Use a field query: + + - query by a given key value pair. Example: + `sccSecurityMarks.foo=bar` + - query by a given key's existence. 
Example: + `sccSecurityMarks.foo:*` """ name: str = proto.Field( @@ -968,10 +1178,30 @@ class ResourceSearchResult(proto.Message): proto.STRING, number=26, ) + tags: MutableSequence['Tag'] = proto.RepeatedField( + proto.MESSAGE, + number=29, + message='Tag', + ) + effective_tags: MutableSequence['EffectiveTagDetails'] = proto.RepeatedField( + proto.MESSAGE, + number=30, + message='EffectiveTagDetails', + ) + enrichments: MutableSequence['AssetEnrichment'] = proto.RepeatedField( + proto.MESSAGE, + number=31, + message='AssetEnrichment', + ) parent_asset_type: str = proto.Field( proto.STRING, number=103, ) + scc_security_marks: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=32, + ) class VersionedResource(proto.Message): @@ -1005,7 +1235,7 @@ class VersionedResource(proto.Message): You can find the resource definition for each supported resource type in this table: - `https://cloud.google.com/asset-inventory/docs/supported-asset-types#searchable_asset_types` + `https://cloud.google.com/asset-inventory/docs/supported-asset-types` """ version: str = proto.Field( @@ -1033,7 +1263,7 @@ class AttachedResource(proto.Message): You can find the supported attached asset types of each resource in this table: - `https://cloud.google.com/asset-inventory/docs/supported-asset-types#searchable_asset_types` + `https://cloud.google.com/asset-inventory/docs/supported-asset-types` versioned_resources (MutableSequence[google.cloud.asset_v1.types.VersionedResource]): Versioned resource representations of this attached resource. This is repeated because @@ -1126,7 +1356,7 @@ class IamPolicySearchResult(proto.Message): set on a resource (like VM instance, Cloud Storage bucket), the project field will indicate the project that contains the resource. If an - IAM policy is set on a folder or orgnization, + IAM policy is set on a folder or organization, this field will be empty. To search against the `project`: @@ -1292,7 +1522,7 @@ class IamPolicyAnalysisState(proto.Message): class ConditionEvaluation(proto.Message): - r"""The Condition evaluation. + r"""The condition evaluation. Attributes: evaluation_value (google.cloud.asset_v1.types.ConditionEvaluation.EvaluationValue): @@ -1312,7 +1542,7 @@ class EvaluationValue(proto.Enum): The evaluation result is `conditional` when the condition expression contains variables that are either missing input values or have not been - supported by Analyzer yet. + supported by Policy Analyzer yet. """ EVALUATION_VALUE_UNSPECIFIED = 0 TRUE = 1 @@ -1420,10 +1650,11 @@ class Identity(proto.Message): Attributes: name (str): - The identity name in any form of members - appear in [IAM policy - binding](https://cloud.google.com/iam/reference/rest/v1/Binding), - such as: + The identity of members, formatted as appear + in an [IAM policy + binding](https://cloud.google.com/iam/reference/rest/v1/Binding). + For example, they might be formatted like the + following: - user:foo@google.com - group:group1@google.com @@ -1431,7 +1662,6 @@ class Identity(proto.Message): - projectOwner:some_project_id - domain:google.com - allUsers - - etc. analysis_state (google.cloud.asset_v1.types.IamPolicyAnalysisState): The analysis state of this identity. 
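The tag, enrichment, and security-mark fields added to ResourceSearchResult above can be exercised together through SearchAllResources. A minimal sketch, assuming a placeholder project and that the caller holds the cloudasset.assets.searchEnrichmentResourceOwners permission mentioned in the enrichments docs:

```
from google.cloud import asset_v1
from google.protobuf import field_mask_pb2

# Sketch only: the scope, query, and asset types are placeholders.
client = asset_v1.AssetServiceClient()

request = asset_v1.SearchAllResourcesRequest(
    scope="projects/my-project",
    query="tagValues:prod",  # field query against directly attached tags
    asset_types=["cloudresourcemanager.googleapis.com/Project"],
    read_mask=field_mask_pb2.FieldMask(
        paths=["name", "asset_type", "tags", "enrichments.resource_owners"],
    ),
)

for resource in client.search_all_resources(request=request):
    tags = [(t.tag_key, t.tag_value) for t in resource.tags]
    owners = [
        owner
        for enrichment in resource.enrichments
        for owner in enrichment.resource_owners.resource_owners
    ]
    print(resource.name, tags, owners)
```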
""" diff --git a/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/__init__.py b/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/__init__.py index a343867284..f271f7c964 100755 --- a/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/__init__.py +++ b/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/__init__.py @@ -26,34 +26,67 @@ from .types.discovery import EventType from .types.discovery import FilteringAttribute from .types.discovery import Provider +from .types.enrollment import Enrollment from .types.eventarc import CreateChannelConnectionRequest from .types.eventarc import CreateChannelRequest +from .types.eventarc import CreateEnrollmentRequest +from .types.eventarc import CreateGoogleApiSourceRequest +from .types.eventarc import CreateMessageBusRequest +from .types.eventarc import CreatePipelineRequest from .types.eventarc import CreateTriggerRequest from .types.eventarc import DeleteChannelConnectionRequest from .types.eventarc import DeleteChannelRequest +from .types.eventarc import DeleteEnrollmentRequest +from .types.eventarc import DeleteGoogleApiSourceRequest +from .types.eventarc import DeleteMessageBusRequest +from .types.eventarc import DeletePipelineRequest from .types.eventarc import DeleteTriggerRequest from .types.eventarc import GetChannelConnectionRequest from .types.eventarc import GetChannelRequest +from .types.eventarc import GetEnrollmentRequest +from .types.eventarc import GetGoogleApiSourceRequest from .types.eventarc import GetGoogleChannelConfigRequest +from .types.eventarc import GetMessageBusRequest +from .types.eventarc import GetPipelineRequest from .types.eventarc import GetProviderRequest from .types.eventarc import GetTriggerRequest from .types.eventarc import ListChannelConnectionsRequest from .types.eventarc import ListChannelConnectionsResponse from .types.eventarc import ListChannelsRequest from .types.eventarc import ListChannelsResponse +from .types.eventarc import ListEnrollmentsRequest +from .types.eventarc import ListEnrollmentsResponse +from .types.eventarc import ListGoogleApiSourcesRequest +from .types.eventarc import ListGoogleApiSourcesResponse +from .types.eventarc import ListMessageBusEnrollmentsRequest +from .types.eventarc import ListMessageBusEnrollmentsResponse +from .types.eventarc import ListMessageBusesRequest +from .types.eventarc import ListMessageBusesResponse +from .types.eventarc import ListPipelinesRequest +from .types.eventarc import ListPipelinesResponse from .types.eventarc import ListProvidersRequest from .types.eventarc import ListProvidersResponse from .types.eventarc import ListTriggersRequest from .types.eventarc import ListTriggersResponse from .types.eventarc import OperationMetadata from .types.eventarc import UpdateChannelRequest +from .types.eventarc import UpdateEnrollmentRequest +from .types.eventarc import UpdateGoogleApiSourceRequest from .types.eventarc import UpdateGoogleChannelConfigRequest +from .types.eventarc import UpdateMessageBusRequest +from .types.eventarc import UpdatePipelineRequest from .types.eventarc import UpdateTriggerRequest +from .types.google_api_source import GoogleApiSource from .types.google_channel_config import GoogleChannelConfig +from .types.logging_config import LoggingConfig +from .types.message_bus import MessageBus +from .types.network_config import NetworkConfig +from .types.pipeline import Pipeline from .types.trigger import CloudRun from .types.trigger import Destination from .types.trigger import EventFilter from .types.trigger 
import GKE +from .types.trigger import HttpEndpoint from .types.trigger import Pubsub from .types.trigger import StateCondition from .types.trigger import Transport @@ -66,11 +99,20 @@ 'CloudRun', 'CreateChannelConnectionRequest', 'CreateChannelRequest', +'CreateEnrollmentRequest', +'CreateGoogleApiSourceRequest', +'CreateMessageBusRequest', +'CreatePipelineRequest', 'CreateTriggerRequest', 'DeleteChannelConnectionRequest', 'DeleteChannelRequest', +'DeleteEnrollmentRequest', +'DeleteGoogleApiSourceRequest', +'DeleteMessageBusRequest', +'DeletePipelineRequest', 'DeleteTriggerRequest', 'Destination', +'Enrollment', 'EventFilter', 'EventType', 'EventarcClient', @@ -78,25 +120,49 @@ 'GKE', 'GetChannelConnectionRequest', 'GetChannelRequest', +'GetEnrollmentRequest', +'GetGoogleApiSourceRequest', 'GetGoogleChannelConfigRequest', +'GetMessageBusRequest', +'GetPipelineRequest', 'GetProviderRequest', 'GetTriggerRequest', +'GoogleApiSource', 'GoogleChannelConfig', +'HttpEndpoint', 'ListChannelConnectionsRequest', 'ListChannelConnectionsResponse', 'ListChannelsRequest', 'ListChannelsResponse', +'ListEnrollmentsRequest', +'ListEnrollmentsResponse', +'ListGoogleApiSourcesRequest', +'ListGoogleApiSourcesResponse', +'ListMessageBusEnrollmentsRequest', +'ListMessageBusEnrollmentsResponse', +'ListMessageBusesRequest', +'ListMessageBusesResponse', +'ListPipelinesRequest', +'ListPipelinesResponse', 'ListProvidersRequest', 'ListProvidersResponse', 'ListTriggersRequest', 'ListTriggersResponse', +'LoggingConfig', +'MessageBus', +'NetworkConfig', 'OperationMetadata', +'Pipeline', 'Provider', 'Pubsub', 'StateCondition', 'Transport', 'Trigger', 'UpdateChannelRequest', +'UpdateEnrollmentRequest', +'UpdateGoogleApiSourceRequest', 'UpdateGoogleChannelConfigRequest', +'UpdateMessageBusRequest', +'UpdatePipelineRequest', 'UpdateTriggerRequest', ) diff --git a/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/gapic_metadata.json b/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/gapic_metadata.json index e560553160..0f72626543 100755 --- a/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/gapic_metadata.json +++ b/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/gapic_metadata.json @@ -20,6 +20,26 @@ "create_channel_connection" ] }, + "CreateEnrollment": { + "methods": [ + "create_enrollment" + ] + }, + "CreateGoogleApiSource": { + "methods": [ + "create_google_api_source" + ] + }, + "CreateMessageBus": { + "methods": [ + "create_message_bus" + ] + }, + "CreatePipeline": { + "methods": [ + "create_pipeline" + ] + }, "CreateTrigger": { "methods": [ "create_trigger" @@ -35,6 +55,26 @@ "delete_channel_connection" ] }, + "DeleteEnrollment": { + "methods": [ + "delete_enrollment" + ] + }, + "DeleteGoogleApiSource": { + "methods": [ + "delete_google_api_source" + ] + }, + "DeleteMessageBus": { + "methods": [ + "delete_message_bus" + ] + }, + "DeletePipeline": { + "methods": [ + "delete_pipeline" + ] + }, "DeleteTrigger": { "methods": [ "delete_trigger" @@ -50,11 +90,31 @@ "get_channel_connection" ] }, + "GetEnrollment": { + "methods": [ + "get_enrollment" + ] + }, + "GetGoogleApiSource": { + "methods": [ + "get_google_api_source" + ] + }, "GetGoogleChannelConfig": { "methods": [ "get_google_channel_config" ] }, + "GetMessageBus": { + "methods": [ + "get_message_bus" + ] + }, + "GetPipeline": { + "methods": [ + "get_pipeline" + ] + }, "GetProvider": { "methods": [ "get_provider" @@ -75,6 +135,31 @@ "list_channels" ] }, + "ListEnrollments": { + "methods": [ + "list_enrollments" + ] + }, 
+ "ListGoogleApiSources": { + "methods": [ + "list_google_api_sources" + ] + }, + "ListMessageBusEnrollments": { + "methods": [ + "list_message_bus_enrollments" + ] + }, + "ListMessageBuses": { + "methods": [ + "list_message_buses" + ] + }, + "ListPipelines": { + "methods": [ + "list_pipelines" + ] + }, "ListProviders": { "methods": [ "list_providers" @@ -90,11 +175,31 @@ "update_channel" ] }, + "UpdateEnrollment": { + "methods": [ + "update_enrollment" + ] + }, + "UpdateGoogleApiSource": { + "methods": [ + "update_google_api_source" + ] + }, "UpdateGoogleChannelConfig": { "methods": [ "update_google_channel_config" ] }, + "UpdateMessageBus": { + "methods": [ + "update_message_bus" + ] + }, + "UpdatePipeline": { + "methods": [ + "update_pipeline" + ] + }, "UpdateTrigger": { "methods": [ "update_trigger" @@ -115,6 +220,26 @@ "create_channel_connection" ] }, + "CreateEnrollment": { + "methods": [ + "create_enrollment" + ] + }, + "CreateGoogleApiSource": { + "methods": [ + "create_google_api_source" + ] + }, + "CreateMessageBus": { + "methods": [ + "create_message_bus" + ] + }, + "CreatePipeline": { + "methods": [ + "create_pipeline" + ] + }, "CreateTrigger": { "methods": [ "create_trigger" @@ -130,6 +255,26 @@ "delete_channel_connection" ] }, + "DeleteEnrollment": { + "methods": [ + "delete_enrollment" + ] + }, + "DeleteGoogleApiSource": { + "methods": [ + "delete_google_api_source" + ] + }, + "DeleteMessageBus": { + "methods": [ + "delete_message_bus" + ] + }, + "DeletePipeline": { + "methods": [ + "delete_pipeline" + ] + }, "DeleteTrigger": { "methods": [ "delete_trigger" @@ -145,11 +290,31 @@ "get_channel_connection" ] }, + "GetEnrollment": { + "methods": [ + "get_enrollment" + ] + }, + "GetGoogleApiSource": { + "methods": [ + "get_google_api_source" + ] + }, "GetGoogleChannelConfig": { "methods": [ "get_google_channel_config" ] }, + "GetMessageBus": { + "methods": [ + "get_message_bus" + ] + }, + "GetPipeline": { + "methods": [ + "get_pipeline" + ] + }, "GetProvider": { "methods": [ "get_provider" @@ -170,6 +335,31 @@ "list_channels" ] }, + "ListEnrollments": { + "methods": [ + "list_enrollments" + ] + }, + "ListGoogleApiSources": { + "methods": [ + "list_google_api_sources" + ] + }, + "ListMessageBusEnrollments": { + "methods": [ + "list_message_bus_enrollments" + ] + }, + "ListMessageBuses": { + "methods": [ + "list_message_buses" + ] + }, + "ListPipelines": { + "methods": [ + "list_pipelines" + ] + }, "ListProviders": { "methods": [ "list_providers" @@ -185,11 +375,31 @@ "update_channel" ] }, + "UpdateEnrollment": { + "methods": [ + "update_enrollment" + ] + }, + "UpdateGoogleApiSource": { + "methods": [ + "update_google_api_source" + ] + }, "UpdateGoogleChannelConfig": { "methods": [ "update_google_channel_config" ] }, + "UpdateMessageBus": { + "methods": [ + "update_message_bus" + ] + }, + "UpdatePipeline": { + "methods": [ + "update_pipeline" + ] + }, "UpdateTrigger": { "methods": [ "update_trigger" @@ -210,6 +420,26 @@ "create_channel_connection" ] }, + "CreateEnrollment": { + "methods": [ + "create_enrollment" + ] + }, + "CreateGoogleApiSource": { + "methods": [ + "create_google_api_source" + ] + }, + "CreateMessageBus": { + "methods": [ + "create_message_bus" + ] + }, + "CreatePipeline": { + "methods": [ + "create_pipeline" + ] + }, "CreateTrigger": { "methods": [ "create_trigger" @@ -225,6 +455,26 @@ "delete_channel_connection" ] }, + "DeleteEnrollment": { + "methods": [ + "delete_enrollment" + ] + }, + "DeleteGoogleApiSource": { + "methods": [ + "delete_google_api_source" + ] 
+ }, + "DeleteMessageBus": { + "methods": [ + "delete_message_bus" + ] + }, + "DeletePipeline": { + "methods": [ + "delete_pipeline" + ] + }, "DeleteTrigger": { "methods": [ "delete_trigger" @@ -240,11 +490,31 @@ "get_channel_connection" ] }, + "GetEnrollment": { + "methods": [ + "get_enrollment" + ] + }, + "GetGoogleApiSource": { + "methods": [ + "get_google_api_source" + ] + }, "GetGoogleChannelConfig": { "methods": [ "get_google_channel_config" ] }, + "GetMessageBus": { + "methods": [ + "get_message_bus" + ] + }, + "GetPipeline": { + "methods": [ + "get_pipeline" + ] + }, "GetProvider": { "methods": [ "get_provider" @@ -265,6 +535,31 @@ "list_channels" ] }, + "ListEnrollments": { + "methods": [ + "list_enrollments" + ] + }, + "ListGoogleApiSources": { + "methods": [ + "list_google_api_sources" + ] + }, + "ListMessageBusEnrollments": { + "methods": [ + "list_message_bus_enrollments" + ] + }, + "ListMessageBuses": { + "methods": [ + "list_message_buses" + ] + }, + "ListPipelines": { + "methods": [ + "list_pipelines" + ] + }, "ListProviders": { "methods": [ "list_providers" @@ -280,11 +575,31 @@ "update_channel" ] }, + "UpdateEnrollment": { + "methods": [ + "update_enrollment" + ] + }, + "UpdateGoogleApiSource": { + "methods": [ + "update_google_api_source" + ] + }, "UpdateGoogleChannelConfig": { "methods": [ "update_google_channel_config" ] }, + "UpdateMessageBus": { + "methods": [ + "update_message_bus" + ] + }, + "UpdatePipeline": { + "methods": [ + "update_pipeline" + ] + }, "UpdateTrigger": { "methods": [ "update_trigger" diff --git a/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/async_client.py b/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/async_client.py index 56293ea1c5..5b51875ca1 100755 --- a/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/async_client.py +++ b/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/async_client.py @@ -42,9 +42,18 @@ from google.cloud.eventarc_v1.types import channel_connection from google.cloud.eventarc_v1.types import channel_connection as gce_channel_connection from google.cloud.eventarc_v1.types import discovery +from google.cloud.eventarc_v1.types import enrollment +from google.cloud.eventarc_v1.types import enrollment as gce_enrollment from google.cloud.eventarc_v1.types import eventarc +from google.cloud.eventarc_v1.types import google_api_source +from google.cloud.eventarc_v1.types import google_api_source as gce_google_api_source from google.cloud.eventarc_v1.types import google_channel_config from google.cloud.eventarc_v1.types import google_channel_config as gce_google_channel_config +from google.cloud.eventarc_v1.types import logging_config +from google.cloud.eventarc_v1.types import message_bus +from google.cloud.eventarc_v1.types import message_bus as gce_message_bus +from google.cloud.eventarc_v1.types import pipeline +from google.cloud.eventarc_v1.types import pipeline as gce_pipeline from google.cloud.eventarc_v1.types import trigger from google.cloud.eventarc_v1.types import trigger as gce_trigger from google.cloud.location import locations_pb2 # type: ignore @@ -88,14 +97,26 @@ class EventarcAsyncClient: parse_cloud_function_path = staticmethod(EventarcClient.parse_cloud_function_path) crypto_key_path = staticmethod(EventarcClient.crypto_key_path) parse_crypto_key_path = staticmethod(EventarcClient.parse_crypto_key_path) + enrollment_path = staticmethod(EventarcClient.enrollment_path) + parse_enrollment_path = 
staticmethod(EventarcClient.parse_enrollment_path) + google_api_source_path = staticmethod(EventarcClient.google_api_source_path) + parse_google_api_source_path = staticmethod(EventarcClient.parse_google_api_source_path) google_channel_config_path = staticmethod(EventarcClient.google_channel_config_path) parse_google_channel_config_path = staticmethod(EventarcClient.parse_google_channel_config_path) + message_bus_path = staticmethod(EventarcClient.message_bus_path) + parse_message_bus_path = staticmethod(EventarcClient.parse_message_bus_path) + network_attachment_path = staticmethod(EventarcClient.network_attachment_path) + parse_network_attachment_path = staticmethod(EventarcClient.parse_network_attachment_path) + pipeline_path = staticmethod(EventarcClient.pipeline_path) + parse_pipeline_path = staticmethod(EventarcClient.parse_pipeline_path) provider_path = staticmethod(EventarcClient.provider_path) parse_provider_path = staticmethod(EventarcClient.parse_provider_path) service_path = staticmethod(EventarcClient.service_path) parse_service_path = staticmethod(EventarcClient.parse_service_path) service_account_path = staticmethod(EventarcClient.service_account_path) parse_service_account_path = staticmethod(EventarcClient.parse_service_account_path) + topic_path = staticmethod(EventarcClient.topic_path) + parse_topic_path = staticmethod(EventarcClient.parse_topic_path) trigger_path = staticmethod(EventarcClient.trigger_path) parse_trigger_path = staticmethod(EventarcClient.parse_trigger_path) workflow_path = staticmethod(EventarcClient.workflow_path) @@ -555,7 +576,6 @@ async def sample_create_trigger(): parent="parent_value", trigger=trigger, trigger_id="trigger_id_value", - validate_only=True, ) # Make the request @@ -696,7 +716,6 @@ async def sample_update_trigger(): # Initialize request argument(s) request = eventarc_v1.UpdateTriggerRequest( - validate_only=True, ) # Make the request @@ -843,7 +862,6 @@ async def sample_delete_trigger(): # Initialize request argument(s) request = eventarc_v1.DeleteTriggerRequest( name="name_value", - validate_only=True, ) # Make the request @@ -1219,7 +1237,6 @@ async def sample_create_channel(): parent="parent_value", channel=channel, channel_id="channel_id_value", - validate_only=True, ) # Make the request @@ -1365,7 +1382,6 @@ async def sample_update_channel(): # Initialize request argument(s) request = eventarc_v1.UpdateChannelRequest( - validate_only=True, ) # Make the request @@ -1506,7 +1522,6 @@ async def sample_delete_channel(): # Initialize request argument(s) request = eventarc_v1.DeleteChannelRequest( name="name_value", - validate_only=True, ) # Make the request @@ -2358,7 +2373,9 @@ async def get_google_channel_config(self, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> google_channel_config.GoogleChannelConfig: - r"""Get a GoogleChannelConfig + r"""Get a GoogleChannelConfig. + The name of the GoogleChannelConfig in the response is + ALWAYS coded with projectID. .. code-block:: python @@ -2589,6 +2606,2767 @@ async def sample_update_google_channel_config(): # Done; return the response. return response + async def get_message_bus(self, + request: Optional[Union[eventarc.GetMessageBusRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> message_bus.MessageBus: + r"""Get a single MessageBus. 
+ + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import eventarc_v1 + + async def sample_get_message_bus(): + # Create a client + client = eventarc_v1.EventarcAsyncClient() + + # Initialize request argument(s) + request = eventarc_v1.GetMessageBusRequest( + name="name_value", + ) + + # Make the request + response = await client.get_message_bus(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.eventarc_v1.types.GetMessageBusRequest, dict]]): + The request object. The request message for the + GetMessageBus method. + name (:class:`str`): + Required. The name of the message bus + to get. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.eventarc_v1.types.MessageBus: + MessageBus for the messages flowing + through the system. The admin has + visibility and control over the messages + being published and consumed and can + restrict publishers and subscribers to + only a subset of data available in the + system by defining authorization + policies. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, eventarc.GetMessageBusRequest): + request = eventarc.GetMessageBusRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.get_message_bus] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def list_message_buses(self, + request: Optional[Union[eventarc.ListMessageBusesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListMessageBusesAsyncPager: + r"""List message buses. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import eventarc_v1 + + async def sample_list_message_buses(): + # Create a client + client = eventarc_v1.EventarcAsyncClient() + + # Initialize request argument(s) + request = eventarc_v1.ListMessageBusesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_message_buses(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.eventarc_v1.types.ListMessageBusesRequest, dict]]): + The request object. The request message for the + ListMessageBuses method. + parent (:class:`str`): + Required. The parent collection to + list message buses on. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.eventarc_v1.services.eventarc.pagers.ListMessageBusesAsyncPager: + The response message for the + `ListMessageBuses` method. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, eventarc.ListMessageBusesRequest): + request = eventarc.ListMessageBusesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.list_message_buses] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListMessageBusesAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_message_bus_enrollments(self, + request: Optional[Union[eventarc.ListMessageBusEnrollmentsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListMessageBusEnrollmentsAsyncPager: + r"""List message bus enrollments. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import eventarc_v1 + + async def sample_list_message_bus_enrollments(): + # Create a client + client = eventarc_v1.EventarcAsyncClient() + + # Initialize request argument(s) + request = eventarc_v1.ListMessageBusEnrollmentsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_message_bus_enrollments(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.eventarc_v1.types.ListMessageBusEnrollmentsRequest, dict]]): + The request object. The request message for the + `ListMessageBusEnrollments` method. + parent (:class:`str`): + Required. The parent message bus to + list enrollments on. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.eventarc_v1.services.eventarc.pagers.ListMessageBusEnrollmentsAsyncPager: + The response message for the + `ListMessageBusEnrollments` method. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request.
+ flattened_params = [parent] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, eventarc.ListMessageBusEnrollmentsRequest): + request = eventarc.ListMessageBusEnrollmentsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.list_message_bus_enrollments] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListMessageBusEnrollmentsAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def create_message_bus(self, + request: Optional[Union[eventarc.CreateMessageBusRequest, dict]] = None, + *, + parent: Optional[str] = None, + message_bus: Optional[gce_message_bus.MessageBus] = None, + message_bus_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: + r"""Create a new MessageBus in a particular project and + location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import eventarc_v1 + + async def sample_create_message_bus(): + # Create a client + client = eventarc_v1.EventarcAsyncClient() + + # Initialize request argument(s) + request = eventarc_v1.CreateMessageBusRequest( + parent="parent_value", + message_bus_id="message_bus_id_value", + ) + + # Make the request + operation = client.create_message_bus(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.eventarc_v1.types.CreateMessageBusRequest, dict]]): + The request object. The request message for the + CreateMessageBus method. + parent (:class:`str`): + Required. The parent collection in + which to add this message bus. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ message_bus (:class:`google.cloud.eventarc_v1.types.MessageBus`): + Required. The message bus to create. + This corresponds to the ``message_bus`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + message_bus_id (:class:`str`): + Required. The user-provided ID to be + assigned to the MessageBus. It should + match the format + `^[a-z]([a-z0-9-]{0,61}[a-z0-9])?$`. + + This corresponds to the ``message_bus_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running + operation. + The result type for the operation will + be + :class:`google.cloud.eventarc_v1.types.MessageBus` + MessageBus for the messages flowing + through the system. The admin has + visibility and control over the messages + being published and consumed and can + restrict publishers and subscribers to + only a subset of data available in the + system by defining authorization + policies. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent, message_bus, message_bus_id] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, eventarc.CreateMessageBusRequest): + request = eventarc.CreateMessageBusRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if message_bus is not None: + request.message_bus = message_bus + if message_bus_id is not None: + request.message_bus_id = message_bus_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.create_message_bus] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + gce_message_bus.MessageBus, + metadata_type=eventarc.OperationMetadata, + ) + + # Done; return the response. 
+ return response + + async def update_message_bus(self, + request: Optional[Union[eventarc.UpdateMessageBusRequest, dict]] = None, + *, + message_bus: Optional[gce_message_bus.MessageBus] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: + r"""Update a single message bus. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import eventarc_v1 + + async def sample_update_message_bus(): + # Create a client + client = eventarc_v1.EventarcAsyncClient() + + # Initialize request argument(s) + request = eventarc_v1.UpdateMessageBusRequest( + ) + + # Make the request + operation = client.update_message_bus(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.eventarc_v1.types.UpdateMessageBusRequest, dict]]): + The request object. The request message for the + UpdateMessageBus method. + message_bus (:class:`google.cloud.eventarc_v1.types.MessageBus`): + Required. The MessageBus to be + updated. + + This corresponds to the ``message_bus`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Optional. The fields to be updated; + only fields explicitly provided are + updated. If no field mask is provided, + all provided fields in the request are + updated. To update all fields, provide a + field mask of "*". + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running + operation. + The result type for the operation will + be + :class:`google.cloud.eventarc_v1.types.MessageBus` + MessageBus for the messages flowing + through the system. The admin has + visibility and control over the messages + being published and consumed and can + restrict publishers and subscribers to + only a subset of data available in the + system by defining authorization + policies. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ flattened_params = [message_bus, update_mask] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, eventarc.UpdateMessageBusRequest): + request = eventarc.UpdateMessageBusRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if message_bus is not None: + request.message_bus = message_bus + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.update_message_bus] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("message_bus.name", request.message_bus.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + gce_message_bus.MessageBus, + metadata_type=eventarc.OperationMetadata, + ) + + # Done; return the response. + return response + + async def delete_message_bus(self, + request: Optional[Union[eventarc.DeleteMessageBusRequest, dict]] = None, + *, + name: Optional[str] = None, + etag: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: + r"""Delete a single message bus. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import eventarc_v1 + + async def sample_delete_message_bus(): + # Create a client + client = eventarc_v1.EventarcAsyncClient() + + # Initialize request argument(s) + request = eventarc_v1.DeleteMessageBusRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_message_bus(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.eventarc_v1.types.DeleteMessageBusRequest, dict]]): + The request object. The request message for the + DeleteMessageBus method. + name (:class:`str`): + Required. The name of the MessageBus + to be deleted. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + etag (:class:`str`): + Optional. If provided, the MessageBus + will only be deleted if the etag matches + the current etag on the resource. 
+ + This corresponds to the ``etag`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running + operation. + The result type for the operation will + be + :class:`google.cloud.eventarc_v1.types.MessageBus` + MessageBus for the messages flowing + through the system. The admin has + visibility and control over the messages + being published and consumed and can + restrict publishers and subscribers to + only a subset of data available in the + system by defining authorization + policies. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name, etag] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, eventarc.DeleteMessageBusRequest): + request = eventarc.DeleteMessageBusRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + if etag is not None: + request.etag = etag + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.delete_message_bus] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + message_bus.MessageBus, + metadata_type=eventarc.OperationMetadata, + ) + + # Done; return the response. + return response + + async def get_enrollment(self, + request: Optional[Union[eventarc.GetEnrollmentRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> enrollment.Enrollment: + r"""Get a single Enrollment. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import eventarc_v1 + + async def sample_get_enrollment(): + # Create a client + client = eventarc_v1.EventarcAsyncClient() + + # Initialize request argument(s) + request = eventarc_v1.GetEnrollmentRequest( + name="name_value", + ) + + # Make the request + response = await client.get_enrollment(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.eventarc_v1.types.GetEnrollmentRequest, dict]]): + The request object. The request message for the + GetEnrollment method. + name (:class:`str`): + Required. The name of the Enrollment + to get. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.eventarc_v1.types.Enrollment: + An enrollment represents a + subscription for messages on a + particular message bus. It defines a + matching criteria for messages on the + bus and the subscriber endpoint where + matched messages should be delivered. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, eventarc.GetEnrollmentRequest): + request = eventarc.GetEnrollmentRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.get_enrollment] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_enrollments(self, + request: Optional[Union[eventarc.ListEnrollmentsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListEnrollmentsAsyncPager: + r"""List Enrollments. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import eventarc_v1 + + async def sample_list_enrollments(): + # Create a client + client = eventarc_v1.EventarcAsyncClient() + + # Initialize request argument(s) + request = eventarc_v1.ListEnrollmentsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_enrollments(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.eventarc_v1.types.ListEnrollmentsRequest, dict]]): + The request object. The request message for the + ListEnrollments method. + parent (:class:`str`): + Required. The parent collection to + list enrollments on. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.eventarc_v1.services.eventarc.pagers.ListEnrollmentsAsyncPager: + The response message for the + `ListEnrollments` method. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, eventarc.ListEnrollmentsRequest): + request = eventarc.ListEnrollmentsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.list_enrollments] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method.
+ response = pagers.ListEnrollmentsAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def create_enrollment(self, + request: Optional[Union[eventarc.CreateEnrollmentRequest, dict]] = None, + *, + parent: Optional[str] = None, + enrollment: Optional[gce_enrollment.Enrollment] = None, + enrollment_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: + r"""Create a new Enrollment in a particular project and + location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import eventarc_v1 + + async def sample_create_enrollment(): + # Create a client + client = eventarc_v1.EventarcAsyncClient() + + # Initialize request argument(s) + enrollment = eventarc_v1.Enrollment() + enrollment.cel_match = "cel_match_value" + enrollment.message_bus = "message_bus_value" + enrollment.destination = "destination_value" + + request = eventarc_v1.CreateEnrollmentRequest( + parent="parent_value", + enrollment=enrollment, + enrollment_id="enrollment_id_value", + ) + + # Make the request + operation = client.create_enrollment(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.eventarc_v1.types.CreateEnrollmentRequest, dict]]): + The request object. The request message for the + CreateEnrollment method. + parent (:class:`str`): + Required. The parent collection in + which to add this enrollment. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + enrollment (:class:`google.cloud.eventarc_v1.types.Enrollment`): + Required. The enrollment to create. + This corresponds to the ``enrollment`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + enrollment_id (:class:`str`): + Required. The user-provided ID to be + assigned to the Enrollment. It should + match the format + `^[a-z]([a-z0-9-]{0,61}[a-z0-9])?$`. + + This corresponds to the ``enrollment_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running + operation. 
+ The result type for the operation will + be + :class:`google.cloud.eventarc_v1.types.Enrollment` + An enrollment represents a subscription + for messages on a particular message + bus. It defines a matching criteria for + messages on the bus and the subscriber + endpoint where matched messages should + be delivered. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent, enrollment, enrollment_id] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, eventarc.CreateEnrollmentRequest): + request = eventarc.CreateEnrollmentRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if enrollment is not None: + request.enrollment = enrollment + if enrollment_id is not None: + request.enrollment_id = enrollment_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.create_enrollment] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + gce_enrollment.Enrollment, + metadata_type=eventarc.OperationMetadata, + ) + + # Done; return the response. + return response + + async def update_enrollment(self, + request: Optional[Union[eventarc.UpdateEnrollmentRequest, dict]] = None, + *, + enrollment: Optional[gce_enrollment.Enrollment] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: + r"""Update a single Enrollment. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import eventarc_v1 + + async def sample_update_enrollment(): + # Create a client + client = eventarc_v1.EventarcAsyncClient() + + # Initialize request argument(s) + enrollment = eventarc_v1.Enrollment() + enrollment.cel_match = "cel_match_value" + enrollment.message_bus = "message_bus_value" + enrollment.destination = "destination_value" + + request = eventarc_v1.UpdateEnrollmentRequest( + enrollment=enrollment, + ) + + # Make the request + operation = client.update_enrollment(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.eventarc_v1.types.UpdateEnrollmentRequest, dict]]): + The request object. The request message for the + UpdateEnrollment method. + enrollment (:class:`google.cloud.eventarc_v1.types.Enrollment`): + Required. The Enrollment to be + updated. + + This corresponds to the ``enrollment`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Optional. The fields to be updated; + only fields explicitly provided are + updated. If no field mask is provided, + all provided fields in the request are + updated. To update all fields, provide a + field mask of "*". + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running + operation. + The result type for the operation will + be + :class:`google.cloud.eventarc_v1.types.Enrollment` + An enrollment represents a subscription + for messages on a particular message + bus. It defines a matching criteria for + messages on the bus and the subscriber + endpoint where matched messages should + be delivered. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [enrollment, update_mask] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, eventarc.UpdateEnrollmentRequest): + request = eventarc.UpdateEnrollmentRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if enrollment is not None: + request.enrollment = enrollment + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.update_enrollment] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("enrollment.name", request.enrollment.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + gce_enrollment.Enrollment, + metadata_type=eventarc.OperationMetadata, + ) + + # Done; return the response. + return response + + async def delete_enrollment(self, + request: Optional[Union[eventarc.DeleteEnrollmentRequest, dict]] = None, + *, + name: Optional[str] = None, + etag: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: + r"""Delete a single Enrollment. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import eventarc_v1 + + async def sample_delete_enrollment(): + # Create a client + client = eventarc_v1.EventarcAsyncClient() + + # Initialize request argument(s) + request = eventarc_v1.DeleteEnrollmentRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_enrollment(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.eventarc_v1.types.DeleteEnrollmentRequest, dict]]): + The request object. The request message for the + DeleteEnrollment method. + name (:class:`str`): + Required. The name of the Enrollment + to be deleted. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + etag (:class:`str`): + Optional. If provided, the Enrollment + will only be deleted if the etag matches + the current etag on the resource. + + This corresponds to the ``etag`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running + operation. 
+ The result type for the operation will + be + :class:`google.cloud.eventarc_v1.types.Enrollment` + An enrollment represents a subscription + for messages on a particular message + bus. It defines a matching criteria for + messages on the bus and the subscriber + endpoint where matched messages should + be delivered. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name, etag] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, eventarc.DeleteEnrollmentRequest): + request = eventarc.DeleteEnrollmentRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + if etag is not None: + request.etag = etag + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.delete_enrollment] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + enrollment.Enrollment, + metadata_type=eventarc.OperationMetadata, + ) + + # Done; return the response. + return response + + async def get_pipeline(self, + request: Optional[Union[eventarc.GetPipelineRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pipeline.Pipeline: + r"""Get a single Pipeline. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import eventarc_v1 + + async def sample_get_pipeline(): + # Create a client + client = eventarc_v1.EventarcAsyncClient() + + # Initialize request argument(s) + request = eventarc_v1.GetPipelineRequest( + name="name_value", + ) + + # Make the request + response = await client.get_pipeline(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.eventarc_v1.types.GetPipelineRequest, dict]]): + The request object. The request message for the + GetPipeline method. + name (:class:`str`): + Required. The name of the pipeline to + get. 
+ + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.eventarc_v1.types.Pipeline: + A representation of the Pipeline + resource. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, eventarc.GetPipelineRequest): + request = eventarc.GetPipelineRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.get_pipeline] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_pipelines(self, + request: Optional[Union[eventarc.ListPipelinesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListPipelinesAsyncPager: + r"""List pipelines. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import eventarc_v1 + + async def sample_list_pipelines(): + # Create a client + client = eventarc_v1.EventarcAsyncClient() + + # Initialize request argument(s) + request = eventarc_v1.ListPipelinesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_pipelines(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.eventarc_v1.types.ListPipelinesRequest, dict]]): + The request object. The request message for the + ListPipelines method. 
+ parent (:class:`str`): + Required. The parent collection to + list pipelines on. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.eventarc_v1.services.eventarc.pagers.ListPipelinesAsyncPager: + The response message for the + ListPipelines method. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, eventarc.ListPipelinesRequest): + request = eventarc.ListPipelinesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.list_pipelines] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListPipelinesAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def create_pipeline(self, + request: Optional[Union[eventarc.CreatePipelineRequest, dict]] = None, + *, + parent: Optional[str] = None, + pipeline: Optional[gce_pipeline.Pipeline] = None, + pipeline_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: + r"""Create a new Pipeline in a particular project and + location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import eventarc_v1 + + async def sample_create_pipeline(): + # Create a client + client = eventarc_v1.EventarcAsyncClient() + + # Initialize request argument(s) + pipeline = eventarc_v1.Pipeline() + pipeline.destinations.http_endpoint.uri = "uri_value" + + request = eventarc_v1.CreatePipelineRequest( + parent="parent_value", + pipeline=pipeline, + pipeline_id="pipeline_id_value", + ) + + # Make the request + operation = client.create_pipeline(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.eventarc_v1.types.CreatePipelineRequest, dict]]): + The request object. The request message for the + CreatePipeline method. + parent (:class:`str`): + Required. The parent collection in + which to add this pipeline. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + pipeline (:class:`google.cloud.eventarc_v1.types.Pipeline`): + Required. The pipeline to create. + This corresponds to the ``pipeline`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + pipeline_id (:class:`str`): + Required. The user-provided ID to be + assigned to the Pipeline. It should + match the format + `^[a-z]([a-z0-9-]{0,61}[a-z0-9])?$`. + + This corresponds to the ``pipeline_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running + operation. + The result type for the operation will + be + :class:`google.cloud.eventarc_v1.types.Pipeline` + A representation of the Pipeline + resource. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent, pipeline, pipeline_id] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, eventarc.CreatePipelineRequest): + request = eventarc.CreatePipelineRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if pipeline is not None: + request.pipeline = pipeline + if pipeline_id is not None: + request.pipeline_id = pipeline_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._client._transport._wrapped_methods[self._client._transport.create_pipeline] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + gce_pipeline.Pipeline, + metadata_type=eventarc.OperationMetadata, + ) + + # Done; return the response. + return response + + async def update_pipeline(self, + request: Optional[Union[eventarc.UpdatePipelineRequest, dict]] = None, + *, + pipeline: Optional[gce_pipeline.Pipeline] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: + r"""Update a single pipeline. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import eventarc_v1 + + async def sample_update_pipeline(): + # Create a client + client = eventarc_v1.EventarcAsyncClient() + + # Initialize request argument(s) + pipeline = eventarc_v1.Pipeline() + pipeline.destinations.http_endpoint.uri = "uri_value" + + request = eventarc_v1.UpdatePipelineRequest( + pipeline=pipeline, + ) + + # Make the request + operation = client.update_pipeline(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.eventarc_v1.types.UpdatePipelineRequest, dict]]): + The request object. The request message for the + UpdatePipeline method. + pipeline (:class:`google.cloud.eventarc_v1.types.Pipeline`): + Required. The Pipeline to be updated. + This corresponds to the ``pipeline`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Optional. The fields to be updated; + only fields explicitly provided are + updated. If no field mask is provided, + all provided fields in the request are + updated. To update all fields, provide a + field mask of "*". + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running + operation. + The result type for the operation will + be + :class:`google.cloud.eventarc_v1.types.Pipeline` + A representation of the Pipeline + resource. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [pipeline, update_mask] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, eventarc.UpdatePipelineRequest): + request = eventarc.UpdatePipelineRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if pipeline is not None: + request.pipeline = pipeline + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.update_pipeline] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("pipeline.name", request.pipeline.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + gce_pipeline.Pipeline, + metadata_type=eventarc.OperationMetadata, + ) + + # Done; return the response. + return response + + async def delete_pipeline(self, + request: Optional[Union[eventarc.DeletePipelineRequest, dict]] = None, + *, + name: Optional[str] = None, + etag: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: + r"""Delete a single pipeline. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import eventarc_v1 + + async def sample_delete_pipeline(): + # Create a client + client = eventarc_v1.EventarcAsyncClient() + + # Initialize request argument(s) + request = eventarc_v1.DeletePipelineRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_pipeline(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.eventarc_v1.types.DeletePipelineRequest, dict]]): + The request object. 
The request message for the + DeletePipeline method. + name (:class:`str`): + Required. The name of the Pipeline to + be deleted. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + etag (:class:`str`): + Optional. If provided, the Pipeline + will only be deleted if the etag matches + the current etag on the resource. + + This corresponds to the ``etag`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running + operation. + The result type for the operation will + be + :class:`google.cloud.eventarc_v1.types.Pipeline` + A representation of the Pipeline + resource. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name, etag] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, eventarc.DeletePipelineRequest): + request = eventarc.DeletePipelineRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + if etag is not None: + request.etag = etag + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.delete_pipeline] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + pipeline.Pipeline, + metadata_type=eventarc.OperationMetadata, + ) + + # Done; return the response. + return response + + async def get_google_api_source(self, + request: Optional[Union[eventarc.GetGoogleApiSourceRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> google_api_source.GoogleApiSource: + r"""Get a single GoogleApiSource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import eventarc_v1 + + async def sample_get_google_api_source(): + # Create a client + client = eventarc_v1.EventarcAsyncClient() + + # Initialize request argument(s) + request = eventarc_v1.GetGoogleApiSourceRequest( + name="name_value", + ) + + # Make the request + response = await client.get_google_api_source(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.eventarc_v1.types.GetGoogleApiSourceRequest, dict]]): + The request object. The request message for the + GetGoogleApiSource method. + name (:class:`str`): + Required. The name of the google api + source to get. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.eventarc_v1.types.GoogleApiSource: + A GoogleApiSource represents a + subscription of 1P events from a + MessageBus. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, eventarc.GetGoogleApiSourceRequest): + request = eventarc.GetGoogleApiSourceRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.get_google_api_source] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def list_google_api_sources(self, + request: Optional[Union[eventarc.ListGoogleApiSourcesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListGoogleApiSourcesAsyncPager: + r"""List GoogleApiSources. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import eventarc_v1 + + async def sample_list_google_api_sources(): + # Create a client + client = eventarc_v1.EventarcAsyncClient() + + # Initialize request argument(s) + request = eventarc_v1.ListGoogleApiSourcesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_google_api_sources(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.eventarc_v1.types.ListGoogleApiSourcesRequest, dict]]): + The request object. The request message for the + ListGoogleApiSources method. + parent (:class:`str`): + Required. The parent collection to + list GoogleApiSources on. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.eventarc_v1.services.eventarc.pagers.ListGoogleApiSourcesAsyncPager: + The response message for the + `ListGoogleApiSources` method. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, eventarc.ListGoogleApiSourcesRequest): + request = eventarc.ListGoogleApiSourcesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.list_google_api_sources] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListGoogleApiSourcesAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def create_google_api_source(self, + request: Optional[Union[eventarc.CreateGoogleApiSourceRequest, dict]] = None, + *, + parent: Optional[str] = None, + google_api_source: Optional[gce_google_api_source.GoogleApiSource] = None, + google_api_source_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: + r"""Create a new GoogleApiSource in a particular project + and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import eventarc_v1 + + async def sample_create_google_api_source(): + # Create a client + client = eventarc_v1.EventarcAsyncClient() + + # Initialize request argument(s) + google_api_source = eventarc_v1.GoogleApiSource() + google_api_source.destination = "destination_value" + + request = eventarc_v1.CreateGoogleApiSourceRequest( + parent="parent_value", + google_api_source=google_api_source, + google_api_source_id="google_api_source_id_value", + ) + + # Make the request + operation = client.create_google_api_source(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.eventarc_v1.types.CreateGoogleApiSourceRequest, dict]]): + The request object. The request message for the + CreateGoogleApiSource method. + parent (:class:`str`): + Required. The parent collection in + which to add this google api source. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + google_api_source (:class:`google.cloud.eventarc_v1.types.GoogleApiSource`): + Required. The google api source to + create. + + This corresponds to the ``google_api_source`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + google_api_source_id (:class:`str`): + Required. The user-provided ID to be + assigned to the GoogleApiSource. It + should match the format + `^[a-z]([a-z0-9-]{0,61}[a-z0-9])?$`. + + This corresponds to the ``google_api_source_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running + operation. + The result type for the operation will + be + :class:`google.cloud.eventarc_v1.types.GoogleApiSource` + A GoogleApiSource represents a + subscription of 1P events from a + MessageBus. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent, google_api_source, google_api_source_id] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, eventarc.CreateGoogleApiSourceRequest): + request = eventarc.CreateGoogleApiSourceRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if google_api_source is not None: + request.google_api_source = google_api_source + if google_api_source_id is not None: + request.google_api_source_id = google_api_source_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.create_google_api_source] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + gce_google_api_source.GoogleApiSource, + metadata_type=eventarc.OperationMetadata, + ) + + # Done; return the response. + return response + + async def update_google_api_source(self, + request: Optional[Union[eventarc.UpdateGoogleApiSourceRequest, dict]] = None, + *, + google_api_source: Optional[gce_google_api_source.GoogleApiSource] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: + r"""Update a single GoogleApiSource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import eventarc_v1 + + async def sample_update_google_api_source(): + # Create a client + client = eventarc_v1.EventarcAsyncClient() + + # Initialize request argument(s) + google_api_source = eventarc_v1.GoogleApiSource() + google_api_source.destination = "destination_value" + + request = eventarc_v1.UpdateGoogleApiSourceRequest( + google_api_source=google_api_source, + ) + + # Make the request + operation = client.update_google_api_source(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.eventarc_v1.types.UpdateGoogleApiSourceRequest, dict]]): + The request object. The request message for the + UpdateGoogleApiSource method. + google_api_source (:class:`google.cloud.eventarc_v1.types.GoogleApiSource`): + Required. The GoogleApiSource to be + updated. + + This corresponds to the ``google_api_source`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Optional. The fields to be updated; + only fields explicitly provided are + updated. If no field mask is provided, + all provided fields in the request are + updated. To update all fields, provide a + field mask of "*". + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running + operation. + The result type for the operation will + be + :class:`google.cloud.eventarc_v1.types.GoogleApiSource` + A GoogleApiSource represents a + subscription of 1P events from a + MessageBus. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [google_api_source, update_mask] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, eventarc.UpdateGoogleApiSourceRequest): + request = eventarc.UpdateGoogleApiSourceRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if google_api_source is not None: + request.google_api_source = google_api_source + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._client._transport._wrapped_methods[self._client._transport.update_google_api_source] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("google_api_source.name", request.google_api_source.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + gce_google_api_source.GoogleApiSource, + metadata_type=eventarc.OperationMetadata, + ) + + # Done; return the response. + return response + + async def delete_google_api_source(self, + request: Optional[Union[eventarc.DeleteGoogleApiSourceRequest, dict]] = None, + *, + name: Optional[str] = None, + etag: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: + r"""Delete a single GoogleApiSource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import eventarc_v1 + + async def sample_delete_google_api_source(): + # Create a client + client = eventarc_v1.EventarcAsyncClient() + + # Initialize request argument(s) + request = eventarc_v1.DeleteGoogleApiSourceRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_google_api_source(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.eventarc_v1.types.DeleteGoogleApiSourceRequest, dict]]): + The request object. The request message for the + DeleteGoogleApiSource method. + name (:class:`str`): + Required. The name of the + GoogleApiSource to be deleted. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + etag (:class:`str`): + Optional. If provided, the GoogleApiSource + will only be deleted if the etag matches + the current etag on the resource. + + This corresponds to the ``etag`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running + operation.
+ The result type for the operation will + be + :class:`google.cloud.eventarc_v1.types.GoogleApiSource` + A GoogleApiSource represents a + subscription of 1P events from a + MessageBus. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name, etag] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, eventarc.DeleteGoogleApiSourceRequest): + request = eventarc.DeleteGoogleApiSourceRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + if etag is not None: + request.etag = etag + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.delete_google_api_source] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + google_api_source.GoogleApiSource, + metadata_type=eventarc.OperationMetadata, + ) + + # Done; return the response. 
+ return response + async def list_operations( self, request: Optional[operations_pb2.ListOperationsRequest] = None, diff --git a/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/client.py b/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/client.py index b348f74715..2073446cd3 100755 --- a/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/client.py +++ b/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/client.py @@ -56,9 +56,18 @@ from google.cloud.eventarc_v1.types import channel_connection from google.cloud.eventarc_v1.types import channel_connection as gce_channel_connection from google.cloud.eventarc_v1.types import discovery +from google.cloud.eventarc_v1.types import enrollment +from google.cloud.eventarc_v1.types import enrollment as gce_enrollment from google.cloud.eventarc_v1.types import eventarc +from google.cloud.eventarc_v1.types import google_api_source +from google.cloud.eventarc_v1.types import google_api_source as gce_google_api_source from google.cloud.eventarc_v1.types import google_channel_config from google.cloud.eventarc_v1.types import google_channel_config as gce_google_channel_config +from google.cloud.eventarc_v1.types import logging_config +from google.cloud.eventarc_v1.types import message_bus +from google.cloud.eventarc_v1.types import message_bus as gce_message_bus +from google.cloud.eventarc_v1.types import pipeline +from google.cloud.eventarc_v1.types import pipeline as gce_pipeline from google.cloud.eventarc_v1.types import trigger from google.cloud.eventarc_v1.types import trigger as gce_trigger from google.cloud.location import locations_pb2 # type: ignore @@ -243,6 +252,28 @@ def parse_crypto_key_path(path: str) -> Dict[str,str]: m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/keyRings/(?P.+?)/cryptoKeys/(?P.+?)$", path) return m.groupdict() if m else {} + @staticmethod + def enrollment_path(project: str,location: str,enrollment: str,) -> str: + """Returns a fully-qualified enrollment string.""" + return "projects/{project}/locations/{location}/enrollments/{enrollment}".format(project=project, location=location, enrollment=enrollment, ) + + @staticmethod + def parse_enrollment_path(path: str) -> Dict[str,str]: + """Parses a enrollment path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/enrollments/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def google_api_source_path(project: str,location: str,google_api_source: str,) -> str: + """Returns a fully-qualified google_api_source string.""" + return "projects/{project}/locations/{location}/googleApiSources/{google_api_source}".format(project=project, location=location, google_api_source=google_api_source, ) + + @staticmethod + def parse_google_api_source_path(path: str) -> Dict[str,str]: + """Parses a google_api_source path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/googleApiSources/(?P.+?)$", path) + return m.groupdict() if m else {} + @staticmethod def google_channel_config_path(project: str,location: str,) -> str: """Returns a fully-qualified google_channel_config string.""" @@ -254,6 +285,39 @@ def parse_google_channel_config_path(path: str) -> Dict[str,str]: m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/googleChannelConfig$", path) return m.groupdict() if m else {} + @staticmethod + def message_bus_path(project: str,location: str,message_bus: str,) -> str: + """Returns a 
fully-qualified message_bus string.""" + return "projects/{project}/locations/{location}/messageBuses/{message_bus}".format(project=project, location=location, message_bus=message_bus, ) + + @staticmethod + def parse_message_bus_path(path: str) -> Dict[str,str]: + """Parses a message_bus path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/messageBuses/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def network_attachment_path(project: str,region: str,networkattachment: str,) -> str: + """Returns a fully-qualified network_attachment string.""" + return "projects/{project}/regions/{region}/networkAttachments/{networkattachment}".format(project=project, region=region, networkattachment=networkattachment, ) + + @staticmethod + def parse_network_attachment_path(path: str) -> Dict[str,str]: + """Parses a network_attachment path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/regions/(?P.+?)/networkAttachments/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def pipeline_path(project: str,location: str,pipeline: str,) -> str: + """Returns a fully-qualified pipeline string.""" + return "projects/{project}/locations/{location}/pipelines/{pipeline}".format(project=project, location=location, pipeline=pipeline, ) + + @staticmethod + def parse_pipeline_path(path: str) -> Dict[str,str]: + """Parses a pipeline path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/pipelines/(?P.+?)$", path) + return m.groupdict() if m else {} + @staticmethod def provider_path(project: str,location: str,provider: str,) -> str: """Returns a fully-qualified provider string.""" @@ -287,6 +351,17 @@ def parse_service_account_path(path: str) -> Dict[str,str]: m = re.match(r"^projects/(?P.+?)/serviceAccounts/(?P.+?)$", path) return m.groupdict() if m else {} + @staticmethod + def topic_path(project: str,topic: str,) -> str: + """Returns a fully-qualified topic string.""" + return "projects/{project}/topics/{topic}".format(project=project, topic=topic, ) + + @staticmethod + def parse_topic_path(path: str) -> Dict[str,str]: + """Parses a topic path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/topics/(?P.+?)$", path) + return m.groupdict() if m else {} + @staticmethod def trigger_path(project: str,location: str,trigger: str,) -> str: """Returns a fully-qualified trigger string.""" @@ -983,7 +1058,6 @@ def sample_create_trigger(): parent="parent_value", trigger=trigger, trigger_id="trigger_id_value", - validate_only=True, ) # Make the request @@ -1123,7 +1197,6 @@ def sample_update_trigger(): # Initialize request argument(s) request = eventarc_v1.UpdateTriggerRequest( - validate_only=True, ) # Make the request @@ -1269,7 +1342,6 @@ def sample_delete_trigger(): # Initialize request argument(s) request = eventarc_v1.DeleteTriggerRequest( name="name_value", - validate_only=True, ) # Make the request @@ -1642,7 +1714,6 @@ def sample_create_channel(): parent="parent_value", channel=channel, channel_id="channel_id_value", - validate_only=True, ) # Make the request @@ -1787,7 +1858,6 @@ def sample_update_channel(): # Initialize request argument(s) request = eventarc_v1.UpdateChannelRequest( - validate_only=True, ) # Make the request @@ -1927,7 +1997,6 @@ def sample_delete_channel(): # Initialize request argument(s) request = eventarc_v1.DeleteChannelRequest( name="name_value", - validate_only=True, ) # Make the request @@ -2772,7 +2841,9 @@ def get_google_channel_config(self, 
timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> google_channel_config.GoogleChannelConfig: - r"""Get a GoogleChannelConfig + r"""Get a GoogleChannelConfig. + The name of the GoogleChannelConfig in the response is + ALWAYS coded with projectID. .. code-block:: python @@ -3001,6 +3072,2746 @@ def sample_update_google_channel_config(): # Done; return the response. return response + def get_message_bus(self, + request: Optional[Union[eventarc.GetMessageBusRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> message_bus.MessageBus: + r"""Get a single MessageBus. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import eventarc_v1 + + def sample_get_message_bus(): + # Create a client + client = eventarc_v1.EventarcClient() + + # Initialize request argument(s) + request = eventarc_v1.GetMessageBusRequest( + name="name_value", + ) + + # Make the request + response = client.get_message_bus(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.eventarc_v1.types.GetMessageBusRequest, dict]): + The request object. The request message for the + GetMessageBus method. + name (str): + Required. The name of the message bus + to get. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.eventarc_v1.types.MessageBus: + MessageBus for the messages flowing + through the system. The admin has + visibility and control over the messages + being published and consumed and can + restrict publishers and subscribers to + only a subset of data available in the + system by defining authorization + policies. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, eventarc.GetMessageBusRequest): + request = eventarc.GetMessageBusRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_message_bus] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_message_buses(self, + request: Optional[Union[eventarc.ListMessageBusesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListMessageBusesPager: + r"""List message buses. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import eventarc_v1 + + def sample_list_message_buses(): + # Create a client + client = eventarc_v1.EventarcClient() + + # Initialize request argument(s) + request = eventarc_v1.ListMessageBusesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_message_buses(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.eventarc_v1.types.ListMessageBusesRequest, dict]): + The request object. The request message for the + ListMessageBuses method. + parent (str): + Required. The parent collection to + list message buses on. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.eventarc_v1.services.eventarc.pagers.ListMessageBusesPager: + The response message for the + `ListMessageBuses` method. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
+ if not isinstance(request, eventarc.ListMessageBusesRequest): + request = eventarc.ListMessageBusesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_message_buses] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListMessageBusesPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_message_bus_enrollments(self, + request: Optional[Union[eventarc.ListMessageBusEnrollmentsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListMessageBusEnrollmentsPager: + r"""List message bus enrollments. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import eventarc_v1 + + def sample_list_message_bus_enrollments(): + # Create a client + client = eventarc_v1.EventarcClient() + + # Initialize request argument(s) + request = eventarc_v1.ListMessageBusEnrollmentsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_message_bus_enrollments(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.eventarc_v1.types.ListMessageBusEnrollmentsRequest, dict]): + The request object. The request message for the + `ListMessageBusEnrollments` method. + parent (str): + Required. The parent message bus to + list enrollments on. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.eventarc_v1.services.eventarc.pagers.ListMessageBusEnrollmentsPager: + The response message for the + `ListMessageBusEnrollments` method.` + Iterating over this object will yield + results and resolve additional pages + automatically. 
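The pager returned by ``list_message_buses`` fetches further pages lazily. A sketch of both item-level and page-level iteration, assuming default credentials, a placeholder parent, and that each page exposes its items on the repeated ``message_buses`` field:

.. code-block:: python

    from google.cloud import eventarc_v1

    client = eventarc_v1.EventarcClient()
    pager = client.list_message_buses(parent="parent_value")

    # Item-level iteration resolves additional pages automatically.
    names = [bus.name for bus in pager]

    # Page-level iteration gives access to each raw ListMessageBusesResponse.
    for page in pager.pages:
        print(len(page.message_buses), page.next_page_token)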
+ + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, eventarc.ListMessageBusEnrollmentsRequest): + request = eventarc.ListMessageBusEnrollmentsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_message_bus_enrollments] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListMessageBusEnrollmentsPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def create_message_bus(self, + request: Optional[Union[eventarc.CreateMessageBusRequest, dict]] = None, + *, + parent: Optional[str] = None, + message_bus: Optional[gce_message_bus.MessageBus] = None, + message_bus_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: + r"""Create a new MessageBus in a particular project and + location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import eventarc_v1 + + def sample_create_message_bus(): + # Create a client + client = eventarc_v1.EventarcClient() + + # Initialize request argument(s) + request = eventarc_v1.CreateMessageBusRequest( + parent="parent_value", + message_bus_id="message_bus_id_value", + ) + + # Make the request + operation = client.create_message_bus(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.eventarc_v1.types.CreateMessageBusRequest, dict]): + The request object. The request message for the + CreateMessageBus method. + parent (str): + Required. The parent collection in + which to add this message bus. 
+ + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + message_bus (google.cloud.eventarc_v1.types.MessageBus): + Required. The message bus to create. + This corresponds to the ``message_bus`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + message_bus_id (str): + Required. The user-provided ID to be + assigned to the MessageBus. It should + match the format + `^[a-z]([a-z0-9-]{0,61}[a-z0-9])?$`. + + This corresponds to the ``message_bus_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running + operation. + The result type for the operation will + be + :class:`google.cloud.eventarc_v1.types.MessageBus` + MessageBus for the messages flowing + through the system. The admin has + visibility and control over the messages + being published and consumed and can + restrict publishers and subscribers to + only a subset of data available in the + system by defining authorization + policies. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent, message_bus, message_bus_id] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, eventarc.CreateMessageBusRequest): + request = eventarc.CreateMessageBusRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if message_bus is not None: + request.message_bus = message_bus + if message_bus_id is not None: + request.message_bus_id = message_bus_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_message_bus] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + gce_message_bus.MessageBus, + metadata_type=eventarc.OperationMetadata, + ) + + # Done; return the response. 
+ return response + + def update_message_bus(self, + request: Optional[Union[eventarc.UpdateMessageBusRequest, dict]] = None, + *, + message_bus: Optional[gce_message_bus.MessageBus] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: + r"""Update a single message bus. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import eventarc_v1 + + def sample_update_message_bus(): + # Create a client + client = eventarc_v1.EventarcClient() + + # Initialize request argument(s) + request = eventarc_v1.UpdateMessageBusRequest( + ) + + # Make the request + operation = client.update_message_bus(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.eventarc_v1.types.UpdateMessageBusRequest, dict]): + The request object. The request message for the + UpdateMessageBus method. + message_bus (google.cloud.eventarc_v1.types.MessageBus): + Required. The MessageBus to be + updated. + + This corresponds to the ``message_bus`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Optional. The fields to be updated; + only fields explicitly provided are + updated. If no field mask is provided, + all provided fields in the request are + updated. To update all fields, provide a + field mask of "*". + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running + operation. + The result type for the operation will + be + :class:`google.cloud.eventarc_v1.types.MessageBus` + MessageBus for the messages flowing + through the system. The admin has + visibility and control over the messages + being published and consumed and can + restrict publishers and subscribers to + only a subset of data available in the + system by defining authorization + policies. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
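``create_message_bus`` returns a long-running operation future rather than the resource itself. A sketch (placeholder parent and ID; the service may require more MessageBus fields than shown) of waiting with an explicit deadline:

.. code-block:: python

    from google.cloud import eventarc_v1

    client = eventarc_v1.EventarcClient()

    operation = client.create_message_bus(
        parent="parent_value",
        message_bus=eventarc_v1.MessageBus(),
        message_bus_id="message_bus_id_value",
    )

    # Block for up to five minutes; result() returns the finished MessageBus.
    bus = operation.result(timeout=300)
    print(bus.name)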
+ flattened_params = [message_bus, update_mask] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, eventarc.UpdateMessageBusRequest): + request = eventarc.UpdateMessageBusRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if message_bus is not None: + request.message_bus = message_bus + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_message_bus] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("message_bus.name", request.message_bus.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + gce_message_bus.MessageBus, + metadata_type=eventarc.OperationMetadata, + ) + + # Done; return the response. + return response + + def delete_message_bus(self, + request: Optional[Union[eventarc.DeleteMessageBusRequest, dict]] = None, + *, + name: Optional[str] = None, + etag: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: + r"""Delete a single message bus. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import eventarc_v1 + + def sample_delete_message_bus(): + # Create a client + client = eventarc_v1.EventarcClient() + + # Initialize request argument(s) + request = eventarc_v1.DeleteMessageBusRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_message_bus(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.eventarc_v1.types.DeleteMessageBusRequest, dict]): + The request object. The request message for the + DeleteMessageBus method. + name (str): + Required. The name of the MessageBus + to be deleted. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + etag (str): + Optional. If provided, the MessageBus + will only be deleted if the etag matches + the current etag on the resource. + + This corresponds to the ``etag`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
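Per the ``update_mask`` description, only the listed paths are updated, and a mask of ``"*"`` replaces every field. A sketch with placeholder values; ``labels`` is an illustrative field path, and the routing header requires ``message_bus.name`` to be set:

.. code-block:: python

    from google.cloud import eventarc_v1
    from google.protobuf import field_mask_pb2

    client = eventarc_v1.EventarcClient()
    bus = eventarc_v1.MessageBus(name="name_value")

    # Update only the paths named in the mask.
    operation = client.update_message_bus(
        message_bus=bus,
        update_mask=field_mask_pb2.FieldMask(paths=["labels"]),
    )
    operation.result()

    # Or, as documented above, update all fields with a mask of "*".
    operation = client.update_message_bus(
        message_bus=bus,
        update_mask=field_mask_pb2.FieldMask(paths=["*"]),
    )
    operation.result()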
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running + operation. + The result type for the operation will + be + :class:`google.cloud.eventarc_v1.types.MessageBus` + MessageBus for the messages flowing + through the system. The admin has + visibility and control over the messages + being published and consumed and can + restrict publishers and subscribers to + only a subset of data available in the + system by defining authorization + policies. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name, etag] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, eventarc.DeleteMessageBusRequest): + request = eventarc.DeleteMessageBusRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + if etag is not None: + request.etag = etag + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_message_bus] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + message_bus.MessageBus, + metadata_type=eventarc.OperationMetadata, + ) + + # Done; return the response. + return response + + def get_enrollment(self, + request: Optional[Union[eventarc.GetEnrollmentRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> enrollment.Enrollment: + r"""Get a single Enrollment. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
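Because ``etag`` makes deletion conditional, a common pattern is to read the resource first and send its current etag back, so a concurrent modification fails the delete instead of silently winning. A sketch with placeholder names, assuming the fetched MessageBus exposes its ``etag``:

.. code-block:: python

    from google.cloud import eventarc_v1

    client = eventarc_v1.EventarcClient()

    bus = client.get_message_bus(name="name_value")

    # The delete succeeds only if the stored etag still matches bus.etag.
    operation = client.delete_message_bus(name=bus.name, etag=bus.etag)
    deleted = operation.result()
    print(deleted.name)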
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import eventarc_v1 + + def sample_get_enrollment(): + # Create a client + client = eventarc_v1.EventarcClient() + + # Initialize request argument(s) + request = eventarc_v1.GetEnrollmentRequest( + name="name_value", + ) + + # Make the request + response = client.get_enrollment(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.eventarc_v1.types.GetEnrollmentRequest, dict]): + The request object. The request message for the + GetEnrollment method. + name (str): + Required. The name of the Enrollment + to get. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.eventarc_v1.types.Enrollment: + An enrollment represents a + subscription for messages on a + particular message bus. It defines a + matching criteria for messages on the + bus and the subscriber endpoint where + matched messages should be delivered. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, eventarc.GetEnrollmentRequest): + request = eventarc.GetEnrollmentRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_enrollment] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_enrollments(self, + request: Optional[Union[eventarc.ListEnrollmentsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListEnrollmentsPager: + r"""List Enrollments. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import eventarc_v1 + + def sample_list_enrollments(): + # Create a client + client = eventarc_v1.EventarcClient() + + # Initialize request argument(s) + request = eventarc_v1.ListEnrollmentsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_enrollments(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.eventarc_v1.types.ListEnrollmentsRequest, dict]): + The request object. The request message for the + ListEnrollments method. + parent (str): + Required. The parent collection to + list triggers on. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.eventarc_v1.services.eventarc.pagers.ListEnrollmentsPager: + The response message for the + `ListEnrollments` method. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, eventarc.ListEnrollmentsRequest): + request = eventarc.ListEnrollmentsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_enrollments] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. 
+ response = pagers.ListEnrollmentsPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def create_enrollment(self, + request: Optional[Union[eventarc.CreateEnrollmentRequest, dict]] = None, + *, + parent: Optional[str] = None, + enrollment: Optional[gce_enrollment.Enrollment] = None, + enrollment_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: + r"""Create a new Enrollment in a particular project and + location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import eventarc_v1 + + def sample_create_enrollment(): + # Create a client + client = eventarc_v1.EventarcClient() + + # Initialize request argument(s) + enrollment = eventarc_v1.Enrollment() + enrollment.cel_match = "cel_match_value" + enrollment.message_bus = "message_bus_value" + enrollment.destination = "destination_value" + + request = eventarc_v1.CreateEnrollmentRequest( + parent="parent_value", + enrollment=enrollment, + enrollment_id="enrollment_id_value", + ) + + # Make the request + operation = client.create_enrollment(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.eventarc_v1.types.CreateEnrollmentRequest, dict]): + The request object. The request message for the + CreateEnrollment method. + parent (str): + Required. The parent collection in + which to add this enrollment. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + enrollment (google.cloud.eventarc_v1.types.Enrollment): + Required. The enrollment to create. + This corresponds to the ``enrollment`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + enrollment_id (str): + Required. The user-provided ID to be + assigned to the Enrollment. It should + match the format + `^[a-z]([a-z0-9-]{0,61}[a-z0-9])?$`. + + This corresponds to the ``enrollment_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running + operation. + The result type for the operation will + be + :class:`google.cloud.eventarc_v1.types.Enrollment` + An enrollment represents a subscription + for messages on a particular message + bus. 
It defines a matching criteria for + messages on the bus and the subscriber + endpoint where matched messages should + be delivered. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent, enrollment, enrollment_id] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, eventarc.CreateEnrollmentRequest): + request = eventarc.CreateEnrollmentRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if enrollment is not None: + request.enrollment = enrollment + if enrollment_id is not None: + request.enrollment_id = enrollment_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_enrollment] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + gce_enrollment.Enrollment, + metadata_type=eventarc.OperationMetadata, + ) + + # Done; return the response. + return response + + def update_enrollment(self, + request: Optional[Union[eventarc.UpdateEnrollmentRequest, dict]] = None, + *, + enrollment: Optional[gce_enrollment.Enrollment] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: + r"""Update a single Enrollment. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
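As the generated sample shows, an Enrollment needs ``cel_match``, ``message_bus``, and ``destination`` before it can be created. A sketch using the flattened arguments and placeholder values:

.. code-block:: python

    from google.cloud import eventarc_v1

    client = eventarc_v1.EventarcClient()

    enrollment = eventarc_v1.Enrollment(
        cel_match="cel_match_value",
        message_bus="message_bus_value",
        destination="destination_value",
    )

    operation = client.create_enrollment(
        parent="parent_value",
        enrollment=enrollment,
        enrollment_id="enrollment_id_value",
    )
    created = operation.result()
    print(created.name)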
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import eventarc_v1 + + def sample_update_enrollment(): + # Create a client + client = eventarc_v1.EventarcClient() + + # Initialize request argument(s) + enrollment = eventarc_v1.Enrollment() + enrollment.cel_match = "cel_match_value" + enrollment.message_bus = "message_bus_value" + enrollment.destination = "destination_value" + + request = eventarc_v1.UpdateEnrollmentRequest( + enrollment=enrollment, + ) + + # Make the request + operation = client.update_enrollment(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.eventarc_v1.types.UpdateEnrollmentRequest, dict]): + The request object. The request message for the + UpdateEnrollment method. + enrollment (google.cloud.eventarc_v1.types.Enrollment): + Required. The Enrollment to be + updated. + + This corresponds to the ``enrollment`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Optional. The fields to be updated; + only fields explicitly provided are + updated. If no field mask is provided, + all provided fields in the request are + updated. To update all fields, provide a + field mask of "*". + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running + operation. + The result type for the operation will + be + :class:`google.cloud.eventarc_v1.types.Enrollment` + An enrollment represents a subscription + for messages on a particular message + bus. It defines a matching criteria for + messages on the bus and the subscriber + endpoint where matched messages should + be delivered. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [enrollment, update_mask] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, eventarc.UpdateEnrollmentRequest): + request = eventarc.UpdateEnrollmentRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if enrollment is not None: + request.enrollment = enrollment + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.update_enrollment] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("enrollment.name", request.enrollment.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + gce_enrollment.Enrollment, + metadata_type=eventarc.OperationMetadata, + ) + + # Done; return the response. + return response + + def delete_enrollment(self, + request: Optional[Union[eventarc.DeleteEnrollmentRequest, dict]] = None, + *, + name: Optional[str] = None, + etag: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: + r"""Delete a single Enrollment. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import eventarc_v1 + + def sample_delete_enrollment(): + # Create a client + client = eventarc_v1.EventarcClient() + + # Initialize request argument(s) + request = eventarc_v1.DeleteEnrollmentRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_enrollment(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.eventarc_v1.types.DeleteEnrollmentRequest, dict]): + The request object. The request message for the + DeleteEnrollment method. + name (str): + Required. The name of the Enrollment + to be deleted. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + etag (str): + Optional. If provided, the Enrollment + will only be deleted if the etag matches + the current etag on the resource. + + This corresponds to the ``etag`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running + operation. + The result type for the operation will + be + :class:`google.cloud.eventarc_v1.types.Enrollment` + An enrollment represents a subscription + for messages on a particular message + bus. It defines a matching criteria for + messages on the bus and the subscriber + endpoint where matched messages should + be delivered. + + """ + # Create or coerce a protobuf request object. 
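Note that ``update_enrollment`` builds its routing header from ``enrollment.name``, so the message passed in should carry the resource name even when the flattened argument is used. A sketch with placeholder values; ``cel_match`` is an illustrative mask path:

.. code-block:: python

    from google.cloud import eventarc_v1
    from google.protobuf import field_mask_pb2

    client = eventarc_v1.EventarcClient()

    enrollment = eventarc_v1.Enrollment(
        name="name_value",  # used for the "enrollment.name" routing header
        cel_match="cel_match_value",
    )

    operation = client.update_enrollment(
        enrollment=enrollment,
        update_mask=field_mask_pb2.FieldMask(paths=["cel_match"]),
    )
    print(operation.result())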
+ # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name, etag] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, eventarc.DeleteEnrollmentRequest): + request = eventarc.DeleteEnrollmentRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + if etag is not None: + request.etag = etag + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_enrollment] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + enrollment.Enrollment, + metadata_type=eventarc.OperationMetadata, + ) + + # Done; return the response. + return response + + def get_pipeline(self, + request: Optional[Union[eventarc.GetPipelineRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pipeline.Pipeline: + r"""Get a single Pipeline. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import eventarc_v1 + + def sample_get_pipeline(): + # Create a client + client = eventarc_v1.EventarcClient() + + # Initialize request argument(s) + request = eventarc_v1.GetPipelineRequest( + name="name_value", + ) + + # Make the request + response = client.get_pipeline(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.eventarc_v1.types.GetPipelineRequest, dict]): + The request object. The request message for the + GetPipeline method. + name (str): + Required. The name of the pipeline to + get. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. 
Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.eventarc_v1.types.Pipeline: + A representation of the Pipeline + resource. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, eventarc.GetPipelineRequest): + request = eventarc.GetPipelineRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_pipeline] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_pipelines(self, + request: Optional[Union[eventarc.ListPipelinesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListPipelinesPager: + r"""List pipelines. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import eventarc_v1 + + def sample_list_pipelines(): + # Create a client + client = eventarc_v1.EventarcClient() + + # Initialize request argument(s) + request = eventarc_v1.ListPipelinesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_pipelines(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.eventarc_v1.types.ListPipelinesRequest, dict]): + The request object. The request message for the + ListPipelines method. + parent (str): + Required. The parent collection to + list pipelines on. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. 
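The ``retry``, ``timeout``, and ``metadata`` parameters documented above can be overridden per call. A sketch with illustrative settings; the metadata pair is an arbitrary example, not a header the service requires:

.. code-block:: python

    from google.api_core import retry as retries
    from google.cloud import eventarc_v1

    client = eventarc_v1.EventarcClient()

    pipeline = client.get_pipeline(
        name="name_value",
        retry=retries.Retry(initial=1.0, maximum=10.0),
        timeout=30.0,
        metadata=[("x-goog-request-reason", "doc-example")],
    )
    print(pipeline.name)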
Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.eventarc_v1.services.eventarc.pagers.ListPipelinesPager: + The response message for the + ListPipelines method. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, eventarc.ListPipelinesRequest): + request = eventarc.ListPipelinesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_pipelines] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListPipelinesPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def create_pipeline(self, + request: Optional[Union[eventarc.CreatePipelineRequest, dict]] = None, + *, + parent: Optional[str] = None, + pipeline: Optional[gce_pipeline.Pipeline] = None, + pipeline_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: + r"""Create a new Pipeline in a particular project and + location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import eventarc_v1 + + def sample_create_pipeline(): + # Create a client + client = eventarc_v1.EventarcClient() + + # Initialize request argument(s) + pipeline = eventarc_v1.Pipeline() + pipeline.destinations.http_endpoint.uri = "uri_value" + + request = eventarc_v1.CreatePipelineRequest( + parent="parent_value", + pipeline=pipeline, + pipeline_id="pipeline_id_value", + ) + + # Make the request + operation = client.create_pipeline(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.eventarc_v1.types.CreatePipelineRequest, dict]): + The request object. The request message for the + CreatePipeline method. + parent (str): + Required. The parent collection in + which to add this pipeline. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + pipeline (google.cloud.eventarc_v1.types.Pipeline): + Required. The pipeline to create. + This corresponds to the ``pipeline`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + pipeline_id (str): + Required. The user-provided ID to be + assigned to the Pipeline. It should + match the format + `^[a-z]([a-z0-9-]{0,61}[a-z0-9])?$`. + + This corresponds to the ``pipeline_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running + operation. + The result type for the operation will + be + :class:`google.cloud.eventarc_v1.types.Pipeline` + A representation of the Pipeline + resource. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent, pipeline, pipeline_id] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, eventarc.CreatePipelineRequest): + request = eventarc.CreatePipelineRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if pipeline is not None: + request.pipeline = pipeline + if pipeline_id is not None: + request.pipeline_id = pipeline_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.create_pipeline] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + gce_pipeline.Pipeline, + metadata_type=eventarc.OperationMetadata, + ) + + # Done; return the response. + return response + + def update_pipeline(self, + request: Optional[Union[eventarc.UpdatePipelineRequest, dict]] = None, + *, + pipeline: Optional[gce_pipeline.Pipeline] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: + r"""Update a single pipeline. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import eventarc_v1 + + def sample_update_pipeline(): + # Create a client + client = eventarc_v1.EventarcClient() + + # Initialize request argument(s) + pipeline = eventarc_v1.Pipeline() + pipeline.destinations.http_endpoint.uri = "uri_value" + + request = eventarc_v1.UpdatePipelineRequest( + pipeline=pipeline, + ) + + # Make the request + operation = client.update_pipeline(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.eventarc_v1.types.UpdatePipelineRequest, dict]): + The request object. The request message for the + UpdatePipeline method. + pipeline (google.cloud.eventarc_v1.types.Pipeline): + Required. The Pipeline to be updated. + This corresponds to the ``pipeline`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Optional. The fields to be updated; + only fields explicitly provided are + updated. If no field mask is provided, + all provided fields in the request are + updated. To update all fields, provide a + field mask of "*". + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running + operation. 
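A Pipeline needs at least one destination; the generated sample sets an HTTP endpoint URI. A sketch of creating one and waiting on the operation, using placeholder values and proto-plus dict initialization for the nested destination message (an assumption about the message layout based on the sample above):

.. code-block:: python

    from google.cloud import eventarc_v1

    client = eventarc_v1.EventarcClient()

    pipeline = eventarc_v1.Pipeline(
        destinations=[{"http_endpoint": {"uri": "uri_value"}}],
    )

    operation = client.create_pipeline(
        parent="parent_value",
        pipeline=pipeline,
        pipeline_id="pipeline_id_value",
    )
    print(operation.result().name)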
+ The result type for the operation will + be + :class:`google.cloud.eventarc_v1.types.Pipeline` + A representation of the Pipeline + resource. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [pipeline, update_mask] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, eventarc.UpdatePipelineRequest): + request = eventarc.UpdatePipelineRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if pipeline is not None: + request.pipeline = pipeline + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_pipeline] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("pipeline.name", request.pipeline.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + gce_pipeline.Pipeline, + metadata_type=eventarc.OperationMetadata, + ) + + # Done; return the response. + return response + + def delete_pipeline(self, + request: Optional[Union[eventarc.DeletePipelineRequest, dict]] = None, + *, + name: Optional[str] = None, + etag: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: + r"""Delete a single pipeline. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import eventarc_v1 + + def sample_delete_pipeline(): + # Create a client + client = eventarc_v1.EventarcClient() + + # Initialize request argument(s) + request = eventarc_v1.DeletePipelineRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_pipeline(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.eventarc_v1.types.DeletePipelineRequest, dict]): + The request object. The request message for the + DeletePipeline method. + name (str): + Required. The name of the Pipeline to + be deleted. 
+ + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + etag (str): + Optional. If provided, the Pipeline + will only be deleted if the etag matches + the current etag on the resource. + + This corresponds to the ``etag`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running + operation. + The result type for the operation will + be + :class:`google.cloud.eventarc_v1.types.Pipeline` + A representation of the Pipeline + resource. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name, etag] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, eventarc.DeletePipelineRequest): + request = eventarc.DeletePipelineRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + if etag is not None: + request.etag = etag + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_pipeline] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + pipeline.Pipeline, + metadata_type=eventarc.OperationMetadata, + ) + + # Done; return the response. + return response + + def get_google_api_source(self, + request: Optional[Union[eventarc.GetGoogleApiSourceRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> google_api_source.GoogleApiSource: + r"""Get a single GoogleApiSource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import eventarc_v1 + + def sample_get_google_api_source(): + # Create a client + client = eventarc_v1.EventarcClient() + + # Initialize request argument(s) + request = eventarc_v1.GetGoogleApiSourceRequest( + name="name_value", + ) + + # Make the request + response = client.get_google_api_source(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.eventarc_v1.types.GetGoogleApiSourceRequest, dict]): + The request object. The request message for the + GetGoogleApiSource method. + name (str): + Required. The name of the google api + source to get. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.eventarc_v1.types.GoogleApiSource: + A GoogleApiSource represents a + subscription of 1P events from a + MessageBus. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, eventarc.GetGoogleApiSourceRequest): + request = eventarc.GetGoogleApiSourceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_google_api_source] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_google_api_sources(self, + request: Optional[Union[eventarc.ListGoogleApiSourcesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListGoogleApiSourcesPager: + r"""List GoogleApiSources. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import eventarc_v1 + + def sample_list_google_api_sources(): + # Create a client + client = eventarc_v1.EventarcClient() + + # Initialize request argument(s) + request = eventarc_v1.ListGoogleApiSourcesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_google_api_sources(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.eventarc_v1.types.ListGoogleApiSourcesRequest, dict]): + The request object. The request message for the + ListGoogleApiSources method. + parent (str): + Required. The parent collection to + list GoogleApiSources on. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.eventarc_v1.services.eventarc.pagers.ListGoogleApiSourcesPager: + The response message for the + `ListGoogleApiSources` method. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, eventarc.ListGoogleApiSourcesRequest): + request = eventarc.ListGoogleApiSourcesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_google_api_sources] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListGoogleApiSourcesPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
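+        # Note: iterating the pager returned below lazily issues further
+        # ListGoogleApiSources requests as pages are exhausted, reusing the
+        # retry, timeout and metadata supplied above.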
+ return response + + def create_google_api_source(self, + request: Optional[Union[eventarc.CreateGoogleApiSourceRequest, dict]] = None, + *, + parent: Optional[str] = None, + google_api_source: Optional[gce_google_api_source.GoogleApiSource] = None, + google_api_source_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: + r"""Create a new GoogleApiSource in a particular project + and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import eventarc_v1 + + def sample_create_google_api_source(): + # Create a client + client = eventarc_v1.EventarcClient() + + # Initialize request argument(s) + google_api_source = eventarc_v1.GoogleApiSource() + google_api_source.destination = "destination_value" + + request = eventarc_v1.CreateGoogleApiSourceRequest( + parent="parent_value", + google_api_source=google_api_source, + google_api_source_id="google_api_source_id_value", + ) + + # Make the request + operation = client.create_google_api_source(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.eventarc_v1.types.CreateGoogleApiSourceRequest, dict]): + The request object. The request message for the + CreateGoogleApiSource method. + parent (str): + Required. The parent collection in + which to add this google api source. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + google_api_source (google.cloud.eventarc_v1.types.GoogleApiSource): + Required. The google api source to + create. + + This corresponds to the ``google_api_source`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + google_api_source_id (str): + Required. The user-provided ID to be + assigned to the GoogleApiSource. It + should match the format + `^[a-z]([a-z0-9-]{0,61}[a-z0-9])?$`. + + This corresponds to the ``google_api_source_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running + operation. + The result type for the operation will + be + :class:`google.cloud.eventarc_v1.types.GoogleApiSource` + A GoogleApiSource represents a + subscription of 1P events from a + MessageBus. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
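+        # For example, passing both ``request=`` and ``parent="parent_value"``
+        # (or any other flattened field) triggers the ValueError below, since a
+        # full request object and individual field arguments are mutually
+        # exclusive.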
+ flattened_params = [parent, google_api_source, google_api_source_id] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, eventarc.CreateGoogleApiSourceRequest): + request = eventarc.CreateGoogleApiSourceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if google_api_source is not None: + request.google_api_source = google_api_source + if google_api_source_id is not None: + request.google_api_source_id = google_api_source_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_google_api_source] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + gce_google_api_source.GoogleApiSource, + metadata_type=eventarc.OperationMetadata, + ) + + # Done; return the response. + return response + + def update_google_api_source(self, + request: Optional[Union[eventarc.UpdateGoogleApiSourceRequest, dict]] = None, + *, + google_api_source: Optional[gce_google_api_source.GoogleApiSource] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: + r"""Update a single GoogleApiSource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import eventarc_v1 + + def sample_update_google_api_source(): + # Create a client + client = eventarc_v1.EventarcClient() + + # Initialize request argument(s) + google_api_source = eventarc_v1.GoogleApiSource() + google_api_source.destination = "destination_value" + + request = eventarc_v1.UpdateGoogleApiSourceRequest( + google_api_source=google_api_source, + ) + + # Make the request + operation = client.update_google_api_source(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.eventarc_v1.types.UpdateGoogleApiSourceRequest, dict]): + The request object. The request message for the + UpdateGoogleApiSource method. + google_api_source (google.cloud.eventarc_v1.types.GoogleApiSource): + Required. 
The GoogleApiSource to be + updated. + + This corresponds to the ``google_api_source`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Optional. The fields to be updated; + only fields explicitly provided are + updated. If no field mask is provided, + all provided fields in the request are + updated. To update all fields, provide a + field mask of "*". + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running + operation. + The result type for the operation will + be + :class:`google.cloud.eventarc_v1.types.GoogleApiSource` + A GoogleApiSource represents a + subscription of 1P events from a + MessageBus. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [google_api_source, update_mask] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, eventarc.UpdateGoogleApiSourceRequest): + request = eventarc.UpdateGoogleApiSourceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if google_api_source is not None: + request.google_api_source = google_api_source + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_google_api_source] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("google_api_source.name", request.google_api_source.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + gce_google_api_source.GoogleApiSource, + metadata_type=eventarc.OperationMetadata, + ) + + # Done; return the response. 
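+        # Note: the future returned below resolves to the updated
+        # GoogleApiSource once the long-running operation completes, e.g. via
+        # ``response.result()``.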
+ return response + + def delete_google_api_source(self, + request: Optional[Union[eventarc.DeleteGoogleApiSourceRequest, dict]] = None, + *, + name: Optional[str] = None, + etag: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: + r"""Delete a single GoogleApiSource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import eventarc_v1 + + def sample_delete_google_api_source(): + # Create a client + client = eventarc_v1.EventarcClient() + + # Initialize request argument(s) + request = eventarc_v1.DeleteGoogleApiSourceRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_google_api_source(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.eventarc_v1.types.DeleteGoogleApiSourceRequest, dict]): + The request object. The request message for the + DeleteGoogleApiSource method. + name (str): + Required. The name of the + GoogleApiSource to be deleted. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + etag (str): + Optional. If provided, the MessageBus + will only be deleted if the etag matches + the current etag on the resource. + + This corresponds to the ``etag`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running + operation. + The result type for the operation will + be + :class:`google.cloud.eventarc_v1.types.GoogleApiSource` + A GoogleApiSource represents a + subscription of 1P events from a + MessageBus. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name, etag] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, eventarc.DeleteGoogleApiSourceRequest): + request = eventarc.DeleteGoogleApiSourceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if name is not None: + request.name = name + if etag is not None: + request.etag = etag + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_google_api_source] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + google_api_source.GoogleApiSource, + metadata_type=eventarc.OperationMetadata, + ) + + # Done; return the response. + return response + def __enter__(self) -> "EventarcClient": return self diff --git a/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/pagers.py b/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/pagers.py index 0b116f59f3..4f48693d2a 100755 --- a/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/pagers.py +++ b/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/pagers.py @@ -27,7 +27,11 @@ from google.cloud.eventarc_v1.types import channel from google.cloud.eventarc_v1.types import channel_connection from google.cloud.eventarc_v1.types import discovery +from google.cloud.eventarc_v1.types import enrollment from google.cloud.eventarc_v1.types import eventarc +from google.cloud.eventarc_v1.types import google_api_source +from google.cloud.eventarc_v1.types import message_bus +from google.cloud.eventarc_v1.types import pipeline from google.cloud.eventarc_v1.types import trigger @@ -585,3 +589,698 @@ async def async_generator(): def __repr__(self) -> str: return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListMessageBusesPager: + """A pager for iterating through ``list_message_buses`` requests. + + This class thinly wraps an initial + :class:`google.cloud.eventarc_v1.types.ListMessageBusesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``message_buses`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListMessageBuses`` requests and continue to iterate + through the ``message_buses`` field on the + corresponding responses. + + All the usual :class:`google.cloud.eventarc_v1.types.ListMessageBusesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., eventarc.ListMessageBusesResponse], + request: eventarc.ListMessageBusesRequest, + response: eventarc.ListMessageBusesResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.eventarc_v1.types.ListMessageBusesRequest): + The initial request object. + response (google.cloud.eventarc_v1.types.ListMessageBusesResponse): + The initial response object. 
+ retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + self._method = method + self._request = eventarc.ListMessageBusesRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[eventarc.ListMessageBusesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[message_bus.MessageBus]: + for page in self.pages: + yield from page.message_buses + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListMessageBusesAsyncPager: + """A pager for iterating through ``list_message_buses`` requests. + + This class thinly wraps an initial + :class:`google.cloud.eventarc_v1.types.ListMessageBusesResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``message_buses`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListMessageBuses`` requests and continue to iterate + through the ``message_buses`` field on the + corresponding responses. + + All the usual :class:`google.cloud.eventarc_v1.types.ListMessageBusesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[eventarc.ListMessageBusesResponse]], + request: eventarc.ListMessageBusesRequest, + response: eventarc.ListMessageBusesResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.eventarc_v1.types.ListMessageBusesRequest): + The initial request object. + response (google.cloud.eventarc_v1.types.ListMessageBusesResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = eventarc.ListMessageBusesRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[eventarc.ListMessageBusesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + def __aiter__(self) -> AsyncIterator[message_bus.MessageBus]: + async def async_generator(): + async for page in self.pages: + for response in page.message_buses: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListMessageBusEnrollmentsPager: + """A pager for iterating through ``list_message_bus_enrollments`` requests. + + This class thinly wraps an initial + :class:`google.cloud.eventarc_v1.types.ListMessageBusEnrollmentsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``enrollments`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListMessageBusEnrollments`` requests and continue to iterate + through the ``enrollments`` field on the + corresponding responses. + + All the usual :class:`google.cloud.eventarc_v1.types.ListMessageBusEnrollmentsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., eventarc.ListMessageBusEnrollmentsResponse], + request: eventarc.ListMessageBusEnrollmentsRequest, + response: eventarc.ListMessageBusEnrollmentsResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.eventarc_v1.types.ListMessageBusEnrollmentsRequest): + The initial request object. + response (google.cloud.eventarc_v1.types.ListMessageBusEnrollmentsResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = eventarc.ListMessageBusEnrollmentsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[eventarc.ListMessageBusEnrollmentsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[str]: + for page in self.pages: + yield from page.enrollments + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListMessageBusEnrollmentsAsyncPager: + """A pager for iterating through ``list_message_bus_enrollments`` requests. + + This class thinly wraps an initial + :class:`google.cloud.eventarc_v1.types.ListMessageBusEnrollmentsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``enrollments`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListMessageBusEnrollments`` requests and continue to iterate + through the ``enrollments`` field on the + corresponding responses. + + All the usual :class:`google.cloud.eventarc_v1.types.ListMessageBusEnrollmentsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[eventarc.ListMessageBusEnrollmentsResponse]], + request: eventarc.ListMessageBusEnrollmentsRequest, + response: eventarc.ListMessageBusEnrollmentsResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.eventarc_v1.types.ListMessageBusEnrollmentsRequest): + The initial request object. + response (google.cloud.eventarc_v1.types.ListMessageBusEnrollmentsResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = eventarc.ListMessageBusEnrollmentsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[eventarc.ListMessageBusEnrollmentsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + def __aiter__(self) -> AsyncIterator[str]: + async def async_generator(): + async for page in self.pages: + for response in page.enrollments: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListEnrollmentsPager: + """A pager for iterating through ``list_enrollments`` requests. + + This class thinly wraps an initial + :class:`google.cloud.eventarc_v1.types.ListEnrollmentsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``enrollments`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListEnrollments`` requests and continue to iterate + through the ``enrollments`` field on the + corresponding responses. + + All the usual :class:`google.cloud.eventarc_v1.types.ListEnrollmentsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., eventarc.ListEnrollmentsResponse], + request: eventarc.ListEnrollmentsRequest, + response: eventarc.ListEnrollmentsResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.eventarc_v1.types.ListEnrollmentsRequest): + The initial request object. + response (google.cloud.eventarc_v1.types.ListEnrollmentsResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = eventarc.ListEnrollmentsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[eventarc.ListEnrollmentsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[enrollment.Enrollment]: + for page in self.pages: + yield from page.enrollments + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListEnrollmentsAsyncPager: + """A pager for iterating through ``list_enrollments`` requests. + + This class thinly wraps an initial + :class:`google.cloud.eventarc_v1.types.ListEnrollmentsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``enrollments`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListEnrollments`` requests and continue to iterate + through the ``enrollments`` field on the + corresponding responses. + + All the usual :class:`google.cloud.eventarc_v1.types.ListEnrollmentsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[eventarc.ListEnrollmentsResponse]], + request: eventarc.ListEnrollmentsRequest, + response: eventarc.ListEnrollmentsResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.eventarc_v1.types.ListEnrollmentsRequest): + The initial request object. + response (google.cloud.eventarc_v1.types.ListEnrollmentsResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = eventarc.ListEnrollmentsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[eventarc.ListEnrollmentsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + def __aiter__(self) -> AsyncIterator[enrollment.Enrollment]: + async def async_generator(): + async for page in self.pages: + for response in page.enrollments: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListPipelinesPager: + """A pager for iterating through ``list_pipelines`` requests. + + This class thinly wraps an initial + :class:`google.cloud.eventarc_v1.types.ListPipelinesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``pipelines`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListPipelines`` requests and continue to iterate + through the ``pipelines`` field on the + corresponding responses. + + All the usual :class:`google.cloud.eventarc_v1.types.ListPipelinesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., eventarc.ListPipelinesResponse], + request: eventarc.ListPipelinesRequest, + response: eventarc.ListPipelinesResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.eventarc_v1.types.ListPipelinesRequest): + The initial request object. + response (google.cloud.eventarc_v1.types.ListPipelinesResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = eventarc.ListPipelinesRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[eventarc.ListPipelinesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[pipeline.Pipeline]: + for page in self.pages: + yield from page.pipelines + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListPipelinesAsyncPager: + """A pager for iterating through ``list_pipelines`` requests. + + This class thinly wraps an initial + :class:`google.cloud.eventarc_v1.types.ListPipelinesResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``pipelines`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListPipelines`` requests and continue to iterate + through the ``pipelines`` field on the + corresponding responses. + + All the usual :class:`google.cloud.eventarc_v1.types.ListPipelinesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[eventarc.ListPipelinesResponse]], + request: eventarc.ListPipelinesRequest, + response: eventarc.ListPipelinesResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.eventarc_v1.types.ListPipelinesRequest): + The initial request object. + response (google.cloud.eventarc_v1.types.ListPipelinesResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = eventarc.ListPipelinesRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[eventarc.ListPipelinesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + def __aiter__(self) -> AsyncIterator[pipeline.Pipeline]: + async def async_generator(): + async for page in self.pages: + for response in page.pipelines: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListGoogleApiSourcesPager: + """A pager for iterating through ``list_google_api_sources`` requests. + + This class thinly wraps an initial + :class:`google.cloud.eventarc_v1.types.ListGoogleApiSourcesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``google_api_sources`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListGoogleApiSources`` requests and continue to iterate + through the ``google_api_sources`` field on the + corresponding responses. + + All the usual :class:`google.cloud.eventarc_v1.types.ListGoogleApiSourcesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., eventarc.ListGoogleApiSourcesResponse], + request: eventarc.ListGoogleApiSourcesRequest, + response: eventarc.ListGoogleApiSourcesResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.eventarc_v1.types.ListGoogleApiSourcesRequest): + The initial request object. + response (google.cloud.eventarc_v1.types.ListGoogleApiSourcesResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = eventarc.ListGoogleApiSourcesRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[eventarc.ListGoogleApiSourcesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[google_api_source.GoogleApiSource]: + for page in self.pages: + yield from page.google_api_sources + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListGoogleApiSourcesAsyncPager: + """A pager for iterating through ``list_google_api_sources`` requests. + + This class thinly wraps an initial + :class:`google.cloud.eventarc_v1.types.ListGoogleApiSourcesResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``google_api_sources`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListGoogleApiSources`` requests and continue to iterate + through the ``google_api_sources`` field on the + corresponding responses. + + All the usual :class:`google.cloud.eventarc_v1.types.ListGoogleApiSourcesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[eventarc.ListGoogleApiSourcesResponse]], + request: eventarc.ListGoogleApiSourcesRequest, + response: eventarc.ListGoogleApiSourcesResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.eventarc_v1.types.ListGoogleApiSourcesRequest): + The initial request object. + response (google.cloud.eventarc_v1.types.ListGoogleApiSourcesResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = eventarc.ListGoogleApiSourcesRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[eventarc.ListGoogleApiSourcesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + def __aiter__(self) -> AsyncIterator[google_api_source.GoogleApiSource]: + async def async_generator(): + async for page in self.pages: + for response in page.google_api_sources: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/base.py b/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/base.py index ca6cf24f12..66d4e2ec56 100755 --- a/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/base.py +++ b/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/base.py @@ -31,9 +31,13 @@ from google.cloud.eventarc_v1.types import channel from google.cloud.eventarc_v1.types import channel_connection from google.cloud.eventarc_v1.types import discovery +from google.cloud.eventarc_v1.types import enrollment from google.cloud.eventarc_v1.types import eventarc +from google.cloud.eventarc_v1.types import google_api_source from google.cloud.eventarc_v1.types import google_channel_config from google.cloud.eventarc_v1.types import google_channel_config as gce_google_channel_config +from google.cloud.eventarc_v1.types import message_bus +from google.cloud.eventarc_v1.types import pipeline from google.cloud.eventarc_v1.types import trigger from google.cloud.location import locations_pb2 # type: ignore from google.iam.v1 import iam_policy_pb2 # type: ignore @@ -226,6 +230,111 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.get_message_bus: gapic_v1.method.wrap_method( + self.get_message_bus, + default_timeout=None, + client_info=client_info, + ), + self.list_message_buses: gapic_v1.method.wrap_method( + self.list_message_buses, + default_timeout=None, + client_info=client_info, + ), + self.list_message_bus_enrollments: gapic_v1.method.wrap_method( + self.list_message_bus_enrollments, + default_timeout=None, + client_info=client_info, + ), + self.create_message_bus: gapic_v1.method.wrap_method( + self.create_message_bus, + default_timeout=None, + client_info=client_info, + ), + self.update_message_bus: gapic_v1.method.wrap_method( + self.update_message_bus, + default_timeout=None, + client_info=client_info, + ), + self.delete_message_bus: gapic_v1.method.wrap_method( + self.delete_message_bus, + default_timeout=None, + client_info=client_info, + ), + self.get_enrollment: gapic_v1.method.wrap_method( + self.get_enrollment, + default_timeout=None, + client_info=client_info, + ), + self.list_enrollments: gapic_v1.method.wrap_method( + self.list_enrollments, + default_timeout=None, + client_info=client_info, + ), + self.create_enrollment: gapic_v1.method.wrap_method( + self.create_enrollment, + default_timeout=None, + 
client_info=client_info, + ), + self.update_enrollment: gapic_v1.method.wrap_method( + self.update_enrollment, + default_timeout=None, + client_info=client_info, + ), + self.delete_enrollment: gapic_v1.method.wrap_method( + self.delete_enrollment, + default_timeout=None, + client_info=client_info, + ), + self.get_pipeline: gapic_v1.method.wrap_method( + self.get_pipeline, + default_timeout=None, + client_info=client_info, + ), + self.list_pipelines: gapic_v1.method.wrap_method( + self.list_pipelines, + default_timeout=None, + client_info=client_info, + ), + self.create_pipeline: gapic_v1.method.wrap_method( + self.create_pipeline, + default_timeout=None, + client_info=client_info, + ), + self.update_pipeline: gapic_v1.method.wrap_method( + self.update_pipeline, + default_timeout=None, + client_info=client_info, + ), + self.delete_pipeline: gapic_v1.method.wrap_method( + self.delete_pipeline, + default_timeout=None, + client_info=client_info, + ), + self.get_google_api_source: gapic_v1.method.wrap_method( + self.get_google_api_source, + default_timeout=None, + client_info=client_info, + ), + self.list_google_api_sources: gapic_v1.method.wrap_method( + self.list_google_api_sources, + default_timeout=None, + client_info=client_info, + ), + self.create_google_api_source: gapic_v1.method.wrap_method( + self.create_google_api_source, + default_timeout=None, + client_info=client_info, + ), + self.update_google_api_source: gapic_v1.method.wrap_method( + self.update_google_api_source, + default_timeout=None, + client_info=client_info, + ), + self.delete_google_api_source: gapic_v1.method.wrap_method( + self.delete_google_api_source, + default_timeout=None, + client_info=client_info, + ), self.get_location: gapic_v1.method.wrap_method( self.get_location, default_timeout=None, @@ -449,6 +558,195 @@ def update_google_channel_config(self) -> Callable[ ]]: raise NotImplementedError() + @property + def get_message_bus(self) -> Callable[ + [eventarc.GetMessageBusRequest], + Union[ + message_bus.MessageBus, + Awaitable[message_bus.MessageBus] + ]]: + raise NotImplementedError() + + @property + def list_message_buses(self) -> Callable[ + [eventarc.ListMessageBusesRequest], + Union[ + eventarc.ListMessageBusesResponse, + Awaitable[eventarc.ListMessageBusesResponse] + ]]: + raise NotImplementedError() + + @property + def list_message_bus_enrollments(self) -> Callable[ + [eventarc.ListMessageBusEnrollmentsRequest], + Union[ + eventarc.ListMessageBusEnrollmentsResponse, + Awaitable[eventarc.ListMessageBusEnrollmentsResponse] + ]]: + raise NotImplementedError() + + @property + def create_message_bus(self) -> Callable[ + [eventarc.CreateMessageBusRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def update_message_bus(self) -> Callable[ + [eventarc.UpdateMessageBusRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def delete_message_bus(self) -> Callable[ + [eventarc.DeleteMessageBusRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def get_enrollment(self) -> Callable[ + [eventarc.GetEnrollmentRequest], + Union[ + enrollment.Enrollment, + Awaitable[enrollment.Enrollment] + ]]: + raise NotImplementedError() + + @property + def list_enrollments(self) -> Callable[ + [eventarc.ListEnrollmentsRequest], + Union[ + eventarc.ListEnrollmentsResponse, + 
Awaitable[eventarc.ListEnrollmentsResponse] + ]]: + raise NotImplementedError() + + @property + def create_enrollment(self) -> Callable[ + [eventarc.CreateEnrollmentRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def update_enrollment(self) -> Callable[ + [eventarc.UpdateEnrollmentRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def delete_enrollment(self) -> Callable[ + [eventarc.DeleteEnrollmentRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def get_pipeline(self) -> Callable[ + [eventarc.GetPipelineRequest], + Union[ + pipeline.Pipeline, + Awaitable[pipeline.Pipeline] + ]]: + raise NotImplementedError() + + @property + def list_pipelines(self) -> Callable[ + [eventarc.ListPipelinesRequest], + Union[ + eventarc.ListPipelinesResponse, + Awaitable[eventarc.ListPipelinesResponse] + ]]: + raise NotImplementedError() + + @property + def create_pipeline(self) -> Callable[ + [eventarc.CreatePipelineRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def update_pipeline(self) -> Callable[ + [eventarc.UpdatePipelineRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def delete_pipeline(self) -> Callable[ + [eventarc.DeletePipelineRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def get_google_api_source(self) -> Callable[ + [eventarc.GetGoogleApiSourceRequest], + Union[ + google_api_source.GoogleApiSource, + Awaitable[google_api_source.GoogleApiSource] + ]]: + raise NotImplementedError() + + @property + def list_google_api_sources(self) -> Callable[ + [eventarc.ListGoogleApiSourcesRequest], + Union[ + eventarc.ListGoogleApiSourcesResponse, + Awaitable[eventarc.ListGoogleApiSourcesResponse] + ]]: + raise NotImplementedError() + + @property + def create_google_api_source(self) -> Callable[ + [eventarc.CreateGoogleApiSourceRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def update_google_api_source(self) -> Callable[ + [eventarc.UpdateGoogleApiSourceRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def delete_google_api_source(self) -> Callable[ + [eventarc.DeleteGoogleApiSourceRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + @property def list_operations( self, diff --git a/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/grpc.py b/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/grpc.py index 887700548d..16c8bb2114 100755 --- a/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/grpc.py +++ b/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/grpc.py @@ -34,9 +34,13 @@ from google.cloud.eventarc_v1.types import channel from google.cloud.eventarc_v1.types import channel_connection from google.cloud.eventarc_v1.types import discovery +from google.cloud.eventarc_v1.types import enrollment 
from google.cloud.eventarc_v1.types import eventarc +from google.cloud.eventarc_v1.types import google_api_source from google.cloud.eventarc_v1.types import google_channel_config from google.cloud.eventarc_v1.types import google_channel_config as gce_google_channel_config +from google.cloud.eventarc_v1.types import message_bus +from google.cloud.eventarc_v1.types import pipeline from google.cloud.eventarc_v1.types import trigger from google.cloud.location import locations_pb2 # type: ignore from google.iam.v1 import iam_policy_pb2 # type: ignore @@ -764,7 +768,9 @@ def get_google_channel_config(self) -> Callable[ google_channel_config.GoogleChannelConfig]: r"""Return a callable for the get google channel config method over gRPC. - Get a GoogleChannelConfig + Get a GoogleChannelConfig. + The name of the GoogleChannelConfig in the response is + ALWAYS coded with projectID. Returns: Callable[[~.GetGoogleChannelConfigRequest], @@ -810,6 +816,556 @@ def update_google_channel_config(self) -> Callable[ ) return self._stubs['update_google_channel_config'] + @property + def get_message_bus(self) -> Callable[ + [eventarc.GetMessageBusRequest], + message_bus.MessageBus]: + r"""Return a callable for the get message bus method over gRPC. + + Get a single MessageBus. + + Returns: + Callable[[~.GetMessageBusRequest], + ~.MessageBus]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_message_bus' not in self._stubs: + self._stubs['get_message_bus'] = self._logged_channel.unary_unary( + '/google.cloud.eventarc.v1.Eventarc/GetMessageBus', + request_serializer=eventarc.GetMessageBusRequest.serialize, + response_deserializer=message_bus.MessageBus.deserialize, + ) + return self._stubs['get_message_bus'] + + @property + def list_message_buses(self) -> Callable[ + [eventarc.ListMessageBusesRequest], + eventarc.ListMessageBusesResponse]: + r"""Return a callable for the list message buses method over gRPC. + + List message buses. + + Returns: + Callable[[~.ListMessageBusesRequest], + ~.ListMessageBusesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_message_buses' not in self._stubs: + self._stubs['list_message_buses'] = self._logged_channel.unary_unary( + '/google.cloud.eventarc.v1.Eventarc/ListMessageBuses', + request_serializer=eventarc.ListMessageBusesRequest.serialize, + response_deserializer=eventarc.ListMessageBusesResponse.deserialize, + ) + return self._stubs['list_message_buses'] + + @property + def list_message_bus_enrollments(self) -> Callable[ + [eventarc.ListMessageBusEnrollmentsRequest], + eventarc.ListMessageBusEnrollmentsResponse]: + r"""Return a callable for the list message bus enrollments method over gRPC. + + List message bus enrollments. + + Returns: + Callable[[~.ListMessageBusEnrollmentsRequest], + ~.ListMessageBusEnrollmentsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'list_message_bus_enrollments' not in self._stubs: + self._stubs['list_message_bus_enrollments'] = self._logged_channel.unary_unary( + '/google.cloud.eventarc.v1.Eventarc/ListMessageBusEnrollments', + request_serializer=eventarc.ListMessageBusEnrollmentsRequest.serialize, + response_deserializer=eventarc.ListMessageBusEnrollmentsResponse.deserialize, + ) + return self._stubs['list_message_bus_enrollments'] + + @property + def create_message_bus(self) -> Callable[ + [eventarc.CreateMessageBusRequest], + operations_pb2.Operation]: + r"""Return a callable for the create message bus method over gRPC. + + Create a new MessageBus in a particular project and + location. + + Returns: + Callable[[~.CreateMessageBusRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_message_bus' not in self._stubs: + self._stubs['create_message_bus'] = self._logged_channel.unary_unary( + '/google.cloud.eventarc.v1.Eventarc/CreateMessageBus', + request_serializer=eventarc.CreateMessageBusRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['create_message_bus'] + + @property + def update_message_bus(self) -> Callable[ + [eventarc.UpdateMessageBusRequest], + operations_pb2.Operation]: + r"""Return a callable for the update message bus method over gRPC. + + Update a single message bus. + + Returns: + Callable[[~.UpdateMessageBusRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_message_bus' not in self._stubs: + self._stubs['update_message_bus'] = self._logged_channel.unary_unary( + '/google.cloud.eventarc.v1.Eventarc/UpdateMessageBus', + request_serializer=eventarc.UpdateMessageBusRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['update_message_bus'] + + @property + def delete_message_bus(self) -> Callable[ + [eventarc.DeleteMessageBusRequest], + operations_pb2.Operation]: + r"""Return a callable for the delete message bus method over gRPC. + + Delete a single message bus. + + Returns: + Callable[[~.DeleteMessageBusRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_message_bus' not in self._stubs: + self._stubs['delete_message_bus'] = self._logged_channel.unary_unary( + '/google.cloud.eventarc.v1.Eventarc/DeleteMessageBus', + request_serializer=eventarc.DeleteMessageBusRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['delete_message_bus'] + + @property + def get_enrollment(self) -> Callable[ + [eventarc.GetEnrollmentRequest], + enrollment.Enrollment]: + r"""Return a callable for the get enrollment method over gRPC. + + Get a single Enrollment. 
+ + Returns: + Callable[[~.GetEnrollmentRequest], + ~.Enrollment]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_enrollment' not in self._stubs: + self._stubs['get_enrollment'] = self._logged_channel.unary_unary( + '/google.cloud.eventarc.v1.Eventarc/GetEnrollment', + request_serializer=eventarc.GetEnrollmentRequest.serialize, + response_deserializer=enrollment.Enrollment.deserialize, + ) + return self._stubs['get_enrollment'] + + @property + def list_enrollments(self) -> Callable[ + [eventarc.ListEnrollmentsRequest], + eventarc.ListEnrollmentsResponse]: + r"""Return a callable for the list enrollments method over gRPC. + + List Enrollments. + + Returns: + Callable[[~.ListEnrollmentsRequest], + ~.ListEnrollmentsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_enrollments' not in self._stubs: + self._stubs['list_enrollments'] = self._logged_channel.unary_unary( + '/google.cloud.eventarc.v1.Eventarc/ListEnrollments', + request_serializer=eventarc.ListEnrollmentsRequest.serialize, + response_deserializer=eventarc.ListEnrollmentsResponse.deserialize, + ) + return self._stubs['list_enrollments'] + + @property + def create_enrollment(self) -> Callable[ + [eventarc.CreateEnrollmentRequest], + operations_pb2.Operation]: + r"""Return a callable for the create enrollment method over gRPC. + + Create a new Enrollment in a particular project and + location. + + Returns: + Callable[[~.CreateEnrollmentRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_enrollment' not in self._stubs: + self._stubs['create_enrollment'] = self._logged_channel.unary_unary( + '/google.cloud.eventarc.v1.Eventarc/CreateEnrollment', + request_serializer=eventarc.CreateEnrollmentRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['create_enrollment'] + + @property + def update_enrollment(self) -> Callable[ + [eventarc.UpdateEnrollmentRequest], + operations_pb2.Operation]: + r"""Return a callable for the update enrollment method over gRPC. + + Update a single Enrollment. + + Returns: + Callable[[~.UpdateEnrollmentRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'update_enrollment' not in self._stubs: + self._stubs['update_enrollment'] = self._logged_channel.unary_unary( + '/google.cloud.eventarc.v1.Eventarc/UpdateEnrollment', + request_serializer=eventarc.UpdateEnrollmentRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['update_enrollment'] + + @property + def delete_enrollment(self) -> Callable[ + [eventarc.DeleteEnrollmentRequest], + operations_pb2.Operation]: + r"""Return a callable for the delete enrollment method over gRPC. + + Delete a single Enrollment. + + Returns: + Callable[[~.DeleteEnrollmentRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_enrollment' not in self._stubs: + self._stubs['delete_enrollment'] = self._logged_channel.unary_unary( + '/google.cloud.eventarc.v1.Eventarc/DeleteEnrollment', + request_serializer=eventarc.DeleteEnrollmentRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['delete_enrollment'] + + @property + def get_pipeline(self) -> Callable[ + [eventarc.GetPipelineRequest], + pipeline.Pipeline]: + r"""Return a callable for the get pipeline method over gRPC. + + Get a single Pipeline. + + Returns: + Callable[[~.GetPipelineRequest], + ~.Pipeline]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_pipeline' not in self._stubs: + self._stubs['get_pipeline'] = self._logged_channel.unary_unary( + '/google.cloud.eventarc.v1.Eventarc/GetPipeline', + request_serializer=eventarc.GetPipelineRequest.serialize, + response_deserializer=pipeline.Pipeline.deserialize, + ) + return self._stubs['get_pipeline'] + + @property + def list_pipelines(self) -> Callable[ + [eventarc.ListPipelinesRequest], + eventarc.ListPipelinesResponse]: + r"""Return a callable for the list pipelines method over gRPC. + + List pipelines. + + Returns: + Callable[[~.ListPipelinesRequest], + ~.ListPipelinesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_pipelines' not in self._stubs: + self._stubs['list_pipelines'] = self._logged_channel.unary_unary( + '/google.cloud.eventarc.v1.Eventarc/ListPipelines', + request_serializer=eventarc.ListPipelinesRequest.serialize, + response_deserializer=eventarc.ListPipelinesResponse.deserialize, + ) + return self._stubs['list_pipelines'] + + @property + def create_pipeline(self) -> Callable[ + [eventarc.CreatePipelineRequest], + operations_pb2.Operation]: + r"""Return a callable for the create pipeline method over gRPC. + + Create a new Pipeline in a particular project and + location. + + Returns: + Callable[[~.CreatePipelineRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_pipeline' not in self._stubs: + self._stubs['create_pipeline'] = self._logged_channel.unary_unary( + '/google.cloud.eventarc.v1.Eventarc/CreatePipeline', + request_serializer=eventarc.CreatePipelineRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['create_pipeline'] + + @property + def update_pipeline(self) -> Callable[ + [eventarc.UpdatePipelineRequest], + operations_pb2.Operation]: + r"""Return a callable for the update pipeline method over gRPC. + + Update a single pipeline. + + Returns: + Callable[[~.UpdatePipelineRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_pipeline' not in self._stubs: + self._stubs['update_pipeline'] = self._logged_channel.unary_unary( + '/google.cloud.eventarc.v1.Eventarc/UpdatePipeline', + request_serializer=eventarc.UpdatePipelineRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['update_pipeline'] + + @property + def delete_pipeline(self) -> Callable[ + [eventarc.DeletePipelineRequest], + operations_pb2.Operation]: + r"""Return a callable for the delete pipeline method over gRPC. + + Delete a single pipeline. + + Returns: + Callable[[~.DeletePipelineRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_pipeline' not in self._stubs: + self._stubs['delete_pipeline'] = self._logged_channel.unary_unary( + '/google.cloud.eventarc.v1.Eventarc/DeletePipeline', + request_serializer=eventarc.DeletePipelineRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['delete_pipeline'] + + @property + def get_google_api_source(self) -> Callable[ + [eventarc.GetGoogleApiSourceRequest], + google_api_source.GoogleApiSource]: + r"""Return a callable for the get google api source method over gRPC. + + Get a single GoogleApiSource. + + Returns: + Callable[[~.GetGoogleApiSourceRequest], + ~.GoogleApiSource]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_google_api_source' not in self._stubs: + self._stubs['get_google_api_source'] = self._logged_channel.unary_unary( + '/google.cloud.eventarc.v1.Eventarc/GetGoogleApiSource', + request_serializer=eventarc.GetGoogleApiSourceRequest.serialize, + response_deserializer=google_api_source.GoogleApiSource.deserialize, + ) + return self._stubs['get_google_api_source'] + + @property + def list_google_api_sources(self) -> Callable[ + [eventarc.ListGoogleApiSourcesRequest], + eventarc.ListGoogleApiSourcesResponse]: + r"""Return a callable for the list google api sources method over gRPC. + + List GoogleApiSources. 
+ + Returns: + Callable[[~.ListGoogleApiSourcesRequest], + ~.ListGoogleApiSourcesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_google_api_sources' not in self._stubs: + self._stubs['list_google_api_sources'] = self._logged_channel.unary_unary( + '/google.cloud.eventarc.v1.Eventarc/ListGoogleApiSources', + request_serializer=eventarc.ListGoogleApiSourcesRequest.serialize, + response_deserializer=eventarc.ListGoogleApiSourcesResponse.deserialize, + ) + return self._stubs['list_google_api_sources'] + + @property + def create_google_api_source(self) -> Callable[ + [eventarc.CreateGoogleApiSourceRequest], + operations_pb2.Operation]: + r"""Return a callable for the create google api source method over gRPC. + + Create a new GoogleApiSource in a particular project + and location. + + Returns: + Callable[[~.CreateGoogleApiSourceRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_google_api_source' not in self._stubs: + self._stubs['create_google_api_source'] = self._logged_channel.unary_unary( + '/google.cloud.eventarc.v1.Eventarc/CreateGoogleApiSource', + request_serializer=eventarc.CreateGoogleApiSourceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['create_google_api_source'] + + @property + def update_google_api_source(self) -> Callable[ + [eventarc.UpdateGoogleApiSourceRequest], + operations_pb2.Operation]: + r"""Return a callable for the update google api source method over gRPC. + + Update a single GoogleApiSource. + + Returns: + Callable[[~.UpdateGoogleApiSourceRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_google_api_source' not in self._stubs: + self._stubs['update_google_api_source'] = self._logged_channel.unary_unary( + '/google.cloud.eventarc.v1.Eventarc/UpdateGoogleApiSource', + request_serializer=eventarc.UpdateGoogleApiSourceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['update_google_api_source'] + + @property + def delete_google_api_source(self) -> Callable[ + [eventarc.DeleteGoogleApiSourceRequest], + operations_pb2.Operation]: + r"""Return a callable for the delete google api source method over gRPC. + + Delete a single GoogleApiSource. + + Returns: + Callable[[~.DeleteGoogleApiSourceRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'delete_google_api_source' not in self._stubs: + self._stubs['delete_google_api_source'] = self._logged_channel.unary_unary( + '/google.cloud.eventarc.v1.Eventarc/DeleteGoogleApiSource', + request_serializer=eventarc.DeleteGoogleApiSourceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['delete_google_api_source'] + def close(self): self._logged_channel.close() diff --git a/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/grpc_asyncio.py b/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/grpc_asyncio.py index 6c940b3a82..3ad12ab796 100755 --- a/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/grpc_asyncio.py +++ b/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/grpc_asyncio.py @@ -37,9 +37,13 @@ from google.cloud.eventarc_v1.types import channel from google.cloud.eventarc_v1.types import channel_connection from google.cloud.eventarc_v1.types import discovery +from google.cloud.eventarc_v1.types import enrollment from google.cloud.eventarc_v1.types import eventarc +from google.cloud.eventarc_v1.types import google_api_source from google.cloud.eventarc_v1.types import google_channel_config from google.cloud.eventarc_v1.types import google_channel_config as gce_google_channel_config +from google.cloud.eventarc_v1.types import message_bus +from google.cloud.eventarc_v1.types import pipeline from google.cloud.eventarc_v1.types import trigger from google.cloud.location import locations_pb2 # type: ignore from google.iam.v1 import iam_policy_pb2 # type: ignore @@ -770,7 +774,9 @@ def get_google_channel_config(self) -> Callable[ Awaitable[google_channel_config.GoogleChannelConfig]]: r"""Return a callable for the get google channel config method over gRPC. - Get a GoogleChannelConfig + Get a GoogleChannelConfig. + The name of the GoogleChannelConfig in the response is + ALWAYS coded with projectID. Returns: Callable[[~.GetGoogleChannelConfigRequest], @@ -816,6 +822,556 @@ def update_google_channel_config(self) -> Callable[ ) return self._stubs['update_google_channel_config'] + @property + def get_message_bus(self) -> Callable[ + [eventarc.GetMessageBusRequest], + Awaitable[message_bus.MessageBus]]: + r"""Return a callable for the get message bus method over gRPC. + + Get a single MessageBus. + + Returns: + Callable[[~.GetMessageBusRequest], + Awaitable[~.MessageBus]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_message_bus' not in self._stubs: + self._stubs['get_message_bus'] = self._logged_channel.unary_unary( + '/google.cloud.eventarc.v1.Eventarc/GetMessageBus', + request_serializer=eventarc.GetMessageBusRequest.serialize, + response_deserializer=message_bus.MessageBus.deserialize, + ) + return self._stubs['get_message_bus'] + + @property + def list_message_buses(self) -> Callable[ + [eventarc.ListMessageBusesRequest], + Awaitable[eventarc.ListMessageBusesResponse]]: + r"""Return a callable for the list message buses method over gRPC. + + List message buses. + + Returns: + Callable[[~.ListMessageBusesRequest], + Awaitable[~.ListMessageBusesResponse]]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_message_buses' not in self._stubs: + self._stubs['list_message_buses'] = self._logged_channel.unary_unary( + '/google.cloud.eventarc.v1.Eventarc/ListMessageBuses', + request_serializer=eventarc.ListMessageBusesRequest.serialize, + response_deserializer=eventarc.ListMessageBusesResponse.deserialize, + ) + return self._stubs['list_message_buses'] + + @property + def list_message_bus_enrollments(self) -> Callable[ + [eventarc.ListMessageBusEnrollmentsRequest], + Awaitable[eventarc.ListMessageBusEnrollmentsResponse]]: + r"""Return a callable for the list message bus enrollments method over gRPC. + + List message bus enrollments. + + Returns: + Callable[[~.ListMessageBusEnrollmentsRequest], + Awaitable[~.ListMessageBusEnrollmentsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_message_bus_enrollments' not in self._stubs: + self._stubs['list_message_bus_enrollments'] = self._logged_channel.unary_unary( + '/google.cloud.eventarc.v1.Eventarc/ListMessageBusEnrollments', + request_serializer=eventarc.ListMessageBusEnrollmentsRequest.serialize, + response_deserializer=eventarc.ListMessageBusEnrollmentsResponse.deserialize, + ) + return self._stubs['list_message_bus_enrollments'] + + @property + def create_message_bus(self) -> Callable[ + [eventarc.CreateMessageBusRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the create message bus method over gRPC. + + Create a new MessageBus in a particular project and + location. + + Returns: + Callable[[~.CreateMessageBusRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_message_bus' not in self._stubs: + self._stubs['create_message_bus'] = self._logged_channel.unary_unary( + '/google.cloud.eventarc.v1.Eventarc/CreateMessageBus', + request_serializer=eventarc.CreateMessageBusRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['create_message_bus'] + + @property + def update_message_bus(self) -> Callable[ + [eventarc.UpdateMessageBusRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the update message bus method over gRPC. + + Update a single message bus. + + Returns: + Callable[[~.UpdateMessageBusRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'update_message_bus' not in self._stubs: + self._stubs['update_message_bus'] = self._logged_channel.unary_unary( + '/google.cloud.eventarc.v1.Eventarc/UpdateMessageBus', + request_serializer=eventarc.UpdateMessageBusRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['update_message_bus'] + + @property + def delete_message_bus(self) -> Callable[ + [eventarc.DeleteMessageBusRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the delete message bus method over gRPC. + + Delete a single message bus. + + Returns: + Callable[[~.DeleteMessageBusRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_message_bus' not in self._stubs: + self._stubs['delete_message_bus'] = self._logged_channel.unary_unary( + '/google.cloud.eventarc.v1.Eventarc/DeleteMessageBus', + request_serializer=eventarc.DeleteMessageBusRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['delete_message_bus'] + + @property + def get_enrollment(self) -> Callable[ + [eventarc.GetEnrollmentRequest], + Awaitable[enrollment.Enrollment]]: + r"""Return a callable for the get enrollment method over gRPC. + + Get a single Enrollment. + + Returns: + Callable[[~.GetEnrollmentRequest], + Awaitable[~.Enrollment]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_enrollment' not in self._stubs: + self._stubs['get_enrollment'] = self._logged_channel.unary_unary( + '/google.cloud.eventarc.v1.Eventarc/GetEnrollment', + request_serializer=eventarc.GetEnrollmentRequest.serialize, + response_deserializer=enrollment.Enrollment.deserialize, + ) + return self._stubs['get_enrollment'] + + @property + def list_enrollments(self) -> Callable[ + [eventarc.ListEnrollmentsRequest], + Awaitable[eventarc.ListEnrollmentsResponse]]: + r"""Return a callable for the list enrollments method over gRPC. + + List Enrollments. + + Returns: + Callable[[~.ListEnrollmentsRequest], + Awaitable[~.ListEnrollmentsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_enrollments' not in self._stubs: + self._stubs['list_enrollments'] = self._logged_channel.unary_unary( + '/google.cloud.eventarc.v1.Eventarc/ListEnrollments', + request_serializer=eventarc.ListEnrollmentsRequest.serialize, + response_deserializer=eventarc.ListEnrollmentsResponse.deserialize, + ) + return self._stubs['list_enrollments'] + + @property + def create_enrollment(self) -> Callable[ + [eventarc.CreateEnrollmentRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the create enrollment method over gRPC. + + Create a new Enrollment in a particular project and + location. 
+ + Returns: + Callable[[~.CreateEnrollmentRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_enrollment' not in self._stubs: + self._stubs['create_enrollment'] = self._logged_channel.unary_unary( + '/google.cloud.eventarc.v1.Eventarc/CreateEnrollment', + request_serializer=eventarc.CreateEnrollmentRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['create_enrollment'] + + @property + def update_enrollment(self) -> Callable[ + [eventarc.UpdateEnrollmentRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the update enrollment method over gRPC. + + Update a single Enrollment. + + Returns: + Callable[[~.UpdateEnrollmentRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_enrollment' not in self._stubs: + self._stubs['update_enrollment'] = self._logged_channel.unary_unary( + '/google.cloud.eventarc.v1.Eventarc/UpdateEnrollment', + request_serializer=eventarc.UpdateEnrollmentRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['update_enrollment'] + + @property + def delete_enrollment(self) -> Callable[ + [eventarc.DeleteEnrollmentRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the delete enrollment method over gRPC. + + Delete a single Enrollment. + + Returns: + Callable[[~.DeleteEnrollmentRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_enrollment' not in self._stubs: + self._stubs['delete_enrollment'] = self._logged_channel.unary_unary( + '/google.cloud.eventarc.v1.Eventarc/DeleteEnrollment', + request_serializer=eventarc.DeleteEnrollmentRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['delete_enrollment'] + + @property + def get_pipeline(self) -> Callable[ + [eventarc.GetPipelineRequest], + Awaitable[pipeline.Pipeline]]: + r"""Return a callable for the get pipeline method over gRPC. + + Get a single Pipeline. + + Returns: + Callable[[~.GetPipelineRequest], + Awaitable[~.Pipeline]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'get_pipeline' not in self._stubs: + self._stubs['get_pipeline'] = self._logged_channel.unary_unary( + '/google.cloud.eventarc.v1.Eventarc/GetPipeline', + request_serializer=eventarc.GetPipelineRequest.serialize, + response_deserializer=pipeline.Pipeline.deserialize, + ) + return self._stubs['get_pipeline'] + + @property + def list_pipelines(self) -> Callable[ + [eventarc.ListPipelinesRequest], + Awaitable[eventarc.ListPipelinesResponse]]: + r"""Return a callable for the list pipelines method over gRPC. + + List pipelines. + + Returns: + Callable[[~.ListPipelinesRequest], + Awaitable[~.ListPipelinesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_pipelines' not in self._stubs: + self._stubs['list_pipelines'] = self._logged_channel.unary_unary( + '/google.cloud.eventarc.v1.Eventarc/ListPipelines', + request_serializer=eventarc.ListPipelinesRequest.serialize, + response_deserializer=eventarc.ListPipelinesResponse.deserialize, + ) + return self._stubs['list_pipelines'] + + @property + def create_pipeline(self) -> Callable[ + [eventarc.CreatePipelineRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the create pipeline method over gRPC. + + Create a new Pipeline in a particular project and + location. + + Returns: + Callable[[~.CreatePipelineRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_pipeline' not in self._stubs: + self._stubs['create_pipeline'] = self._logged_channel.unary_unary( + '/google.cloud.eventarc.v1.Eventarc/CreatePipeline', + request_serializer=eventarc.CreatePipelineRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['create_pipeline'] + + @property + def update_pipeline(self) -> Callable[ + [eventarc.UpdatePipelineRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the update pipeline method over gRPC. + + Update a single pipeline. + + Returns: + Callable[[~.UpdatePipelineRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_pipeline' not in self._stubs: + self._stubs['update_pipeline'] = self._logged_channel.unary_unary( + '/google.cloud.eventarc.v1.Eventarc/UpdatePipeline', + request_serializer=eventarc.UpdatePipelineRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['update_pipeline'] + + @property + def delete_pipeline(self) -> Callable[ + [eventarc.DeletePipelineRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the delete pipeline method over gRPC. + + Delete a single pipeline. + + Returns: + Callable[[~.DeletePipelineRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_pipeline' not in self._stubs: + self._stubs['delete_pipeline'] = self._logged_channel.unary_unary( + '/google.cloud.eventarc.v1.Eventarc/DeletePipeline', + request_serializer=eventarc.DeletePipelineRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['delete_pipeline'] + + @property + def get_google_api_source(self) -> Callable[ + [eventarc.GetGoogleApiSourceRequest], + Awaitable[google_api_source.GoogleApiSource]]: + r"""Return a callable for the get google api source method over gRPC. + + Get a single GoogleApiSource. + + Returns: + Callable[[~.GetGoogleApiSourceRequest], + Awaitable[~.GoogleApiSource]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_google_api_source' not in self._stubs: + self._stubs['get_google_api_source'] = self._logged_channel.unary_unary( + '/google.cloud.eventarc.v1.Eventarc/GetGoogleApiSource', + request_serializer=eventarc.GetGoogleApiSourceRequest.serialize, + response_deserializer=google_api_source.GoogleApiSource.deserialize, + ) + return self._stubs['get_google_api_source'] + + @property + def list_google_api_sources(self) -> Callable[ + [eventarc.ListGoogleApiSourcesRequest], + Awaitable[eventarc.ListGoogleApiSourcesResponse]]: + r"""Return a callable for the list google api sources method over gRPC. + + List GoogleApiSources. + + Returns: + Callable[[~.ListGoogleApiSourcesRequest], + Awaitable[~.ListGoogleApiSourcesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_google_api_sources' not in self._stubs: + self._stubs['list_google_api_sources'] = self._logged_channel.unary_unary( + '/google.cloud.eventarc.v1.Eventarc/ListGoogleApiSources', + request_serializer=eventarc.ListGoogleApiSourcesRequest.serialize, + response_deserializer=eventarc.ListGoogleApiSourcesResponse.deserialize, + ) + return self._stubs['list_google_api_sources'] + + @property + def create_google_api_source(self) -> Callable[ + [eventarc.CreateGoogleApiSourceRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the create google api source method over gRPC. + + Create a new GoogleApiSource in a particular project + and location. + + Returns: + Callable[[~.CreateGoogleApiSourceRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'create_google_api_source' not in self._stubs: + self._stubs['create_google_api_source'] = self._logged_channel.unary_unary( + '/google.cloud.eventarc.v1.Eventarc/CreateGoogleApiSource', + request_serializer=eventarc.CreateGoogleApiSourceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['create_google_api_source'] + + @property + def update_google_api_source(self) -> Callable[ + [eventarc.UpdateGoogleApiSourceRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the update google api source method over gRPC. + + Update a single GoogleApiSource. + + Returns: + Callable[[~.UpdateGoogleApiSourceRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_google_api_source' not in self._stubs: + self._stubs['update_google_api_source'] = self._logged_channel.unary_unary( + '/google.cloud.eventarc.v1.Eventarc/UpdateGoogleApiSource', + request_serializer=eventarc.UpdateGoogleApiSourceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['update_google_api_source'] + + @property + def delete_google_api_source(self) -> Callable[ + [eventarc.DeleteGoogleApiSourceRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the delete google api source method over gRPC. + + Delete a single GoogleApiSource. + + Returns: + Callable[[~.DeleteGoogleApiSourceRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'delete_google_api_source' not in self._stubs: + self._stubs['delete_google_api_source'] = self._logged_channel.unary_unary( + '/google.cloud.eventarc.v1.Eventarc/DeleteGoogleApiSource', + request_serializer=eventarc.DeleteGoogleApiSourceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['delete_google_api_source'] + def _prep_wrapped_messages(self, client_info): """ Precompute the wrapped methods, overriding the base class method to use async wrappers.""" self._wrapped_methods = { @@ -909,6 +1465,111 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.get_message_bus: self._wrap_method( + self.get_message_bus, + default_timeout=None, + client_info=client_info, + ), + self.list_message_buses: self._wrap_method( + self.list_message_buses, + default_timeout=None, + client_info=client_info, + ), + self.list_message_bus_enrollments: self._wrap_method( + self.list_message_bus_enrollments, + default_timeout=None, + client_info=client_info, + ), + self.create_message_bus: self._wrap_method( + self.create_message_bus, + default_timeout=None, + client_info=client_info, + ), + self.update_message_bus: self._wrap_method( + self.update_message_bus, + default_timeout=None, + client_info=client_info, + ), + self.delete_message_bus: self._wrap_method( + self.delete_message_bus, + default_timeout=None, + client_info=client_info, + ), + self.get_enrollment: self._wrap_method( + self.get_enrollment, + default_timeout=None, + client_info=client_info, + ), + self.list_enrollments: self._wrap_method( + self.list_enrollments, + default_timeout=None, + client_info=client_info, + ), + self.create_enrollment: self._wrap_method( + self.create_enrollment, + default_timeout=None, + client_info=client_info, + ), + self.update_enrollment: self._wrap_method( + self.update_enrollment, + default_timeout=None, + client_info=client_info, + ), + self.delete_enrollment: self._wrap_method( + self.delete_enrollment, + default_timeout=None, + client_info=client_info, + ), + self.get_pipeline: self._wrap_method( + self.get_pipeline, + default_timeout=None, + client_info=client_info, + ), + self.list_pipelines: self._wrap_method( + self.list_pipelines, + default_timeout=None, + client_info=client_info, + ), + self.create_pipeline: self._wrap_method( + self.create_pipeline, + default_timeout=None, + client_info=client_info, + ), + self.update_pipeline: self._wrap_method( + self.update_pipeline, + default_timeout=None, + client_info=client_info, + ), + self.delete_pipeline: self._wrap_method( + self.delete_pipeline, + default_timeout=None, + client_info=client_info, + ), + self.get_google_api_source: self._wrap_method( + self.get_google_api_source, + default_timeout=None, + client_info=client_info, + ), + self.list_google_api_sources: self._wrap_method( + self.list_google_api_sources, + default_timeout=None, + client_info=client_info, + ), + self.create_google_api_source: self._wrap_method( + self.create_google_api_source, + default_timeout=None, + client_info=client_info, + ), + self.update_google_api_source: self._wrap_method( + self.update_google_api_source, + default_timeout=None, + client_info=client_info, + ), + self.delete_google_api_source: self._wrap_method( + self.delete_google_api_source, + default_timeout=None, + client_info=client_info, + ), self.get_location: self._wrap_method( self.get_location, default_timeout=None, diff --git 
a/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/rest.py b/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/rest.py index e7ae204609..d075dd4ffc 100755 --- a/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/rest.py +++ b/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/rest.py @@ -40,9 +40,13 @@ from google.cloud.eventarc_v1.types import channel from google.cloud.eventarc_v1.types import channel_connection from google.cloud.eventarc_v1.types import discovery +from google.cloud.eventarc_v1.types import enrollment from google.cloud.eventarc_v1.types import eventarc +from google.cloud.eventarc_v1.types import google_api_source from google.cloud.eventarc_v1.types import google_channel_config from google.cloud.eventarc_v1.types import google_channel_config as gce_google_channel_config +from google.cloud.eventarc_v1.types import message_bus +from google.cloud.eventarc_v1.types import pipeline from google.cloud.eventarc_v1.types import trigger from google.longrunning import operations_pb2 # type: ignore @@ -104,6 +108,38 @@ def post_create_channel_connection(self, response): logging.log(f"Received response: {response}") return response + def pre_create_enrollment(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_enrollment(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_create_google_api_source(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_google_api_source(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_create_message_bus(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_message_bus(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_create_pipeline(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_pipeline(self, response): + logging.log(f"Received response: {response}") + return response + def pre_create_trigger(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -128,6 +164,38 @@ def post_delete_channel_connection(self, response): logging.log(f"Received response: {response}") return response + def pre_delete_enrollment(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete_enrollment(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_google_api_source(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete_google_api_source(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_message_bus(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete_message_bus(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_pipeline(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete_pipeline(self, response): + logging.log(f"Received response: {response}") + return response + def 
pre_delete_trigger(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -152,6 +220,22 @@ def post_get_channel_connection(self, response): logging.log(f"Received response: {response}") return response + def pre_get_enrollment(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_enrollment(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_google_api_source(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_google_api_source(self, response): + logging.log(f"Received response: {response}") + return response + def pre_get_google_channel_config(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -160,6 +244,22 @@ def post_get_google_channel_config(self, response): logging.log(f"Received response: {response}") return response + def pre_get_message_bus(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_message_bus(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_pipeline(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_pipeline(self, response): + logging.log(f"Received response: {response}") + return response + def pre_get_provider(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -192,6 +292,46 @@ def post_list_channels(self, response): logging.log(f"Received response: {response}") return response + def pre_list_enrollments(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_enrollments(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_google_api_sources(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_google_api_sources(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_message_bus_enrollments(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_message_bus_enrollments(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_message_buses(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_message_buses(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_pipelines(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_pipelines(self, response): + logging.log(f"Received response: {response}") + return response + def pre_list_providers(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -216,6 +356,22 @@ def post_update_channel(self, response): logging.log(f"Received response: {response}") return response + def pre_update_enrollment(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_enrollment(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_google_api_source(self, request, metadata): + logging.log(f"Received request: {request}") + return 
request, metadata + + def post_update_google_api_source(self, response): + logging.log(f"Received response: {response}") + return response + def pre_update_google_channel_config(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -224,6 +380,22 @@ def post_update_google_channel_config(self, response): logging.log(f"Received response: {response}") return response + def pre_update_message_bus(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_message_bus(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_pipeline(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_pipeline(self, response): + logging.log(f"Received response: {response}") + return response + def pre_update_trigger(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -309,6 +481,150 @@ def post_create_channel_connection_with_metadata(self, response: operations_pb2. """ return response, metadata + def pre_create_enrollment(self, request: eventarc.CreateEnrollmentRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[eventarc.CreateEnrollmentRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for create_enrollment + + Override in a subclass to manipulate the request or metadata + before they are sent to the Eventarc server. + """ + return request, metadata + + def post_create_enrollment(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + """Post-rpc interceptor for create_enrollment + + DEPRECATED. Please use the `post_create_enrollment_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the Eventarc server but before + it is returned to user code. This `post_create_enrollment` interceptor runs + before the `post_create_enrollment_with_metadata` interceptor. + """ + return response + + def post_create_enrollment_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_enrollment + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Eventarc server but before it is returned to user code. + + We recommend only using this `post_create_enrollment_with_metadata` + interceptor in new development instead of the `post_create_enrollment` interceptor. + When both interceptors are used, this `post_create_enrollment_with_metadata` interceptor runs after the + `post_create_enrollment` interceptor. The (possibly modified) response returned by + `post_create_enrollment` will be passed to + `post_create_enrollment_with_metadata`. + """ + return response, metadata + + def pre_create_google_api_source(self, request: eventarc.CreateGoogleApiSourceRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[eventarc.CreateGoogleApiSourceRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for create_google_api_source + + Override in a subclass to manipulate the request or metadata + before they are sent to the Eventarc server. 
+ """ + return request, metadata + + def post_create_google_api_source(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + """Post-rpc interceptor for create_google_api_source + + DEPRECATED. Please use the `post_create_google_api_source_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the Eventarc server but before + it is returned to user code. This `post_create_google_api_source` interceptor runs + before the `post_create_google_api_source_with_metadata` interceptor. + """ + return response + + def post_create_google_api_source_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_google_api_source + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Eventarc server but before it is returned to user code. + + We recommend only using this `post_create_google_api_source_with_metadata` + interceptor in new development instead of the `post_create_google_api_source` interceptor. + When both interceptors are used, this `post_create_google_api_source_with_metadata` interceptor runs after the + `post_create_google_api_source` interceptor. The (possibly modified) response returned by + `post_create_google_api_source` will be passed to + `post_create_google_api_source_with_metadata`. + """ + return response, metadata + + def pre_create_message_bus(self, request: eventarc.CreateMessageBusRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[eventarc.CreateMessageBusRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for create_message_bus + + Override in a subclass to manipulate the request or metadata + before they are sent to the Eventarc server. + """ + return request, metadata + + def post_create_message_bus(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + """Post-rpc interceptor for create_message_bus + + DEPRECATED. Please use the `post_create_message_bus_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the Eventarc server but before + it is returned to user code. This `post_create_message_bus` interceptor runs + before the `post_create_message_bus_with_metadata` interceptor. + """ + return response + + def post_create_message_bus_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_message_bus + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Eventarc server but before it is returned to user code. + + We recommend only using this `post_create_message_bus_with_metadata` + interceptor in new development instead of the `post_create_message_bus` interceptor. + When both interceptors are used, this `post_create_message_bus_with_metadata` interceptor runs after the + `post_create_message_bus` interceptor. The (possibly modified) response returned by + `post_create_message_bus` will be passed to + `post_create_message_bus_with_metadata`. 
+ """ + return response, metadata + + def pre_create_pipeline(self, request: eventarc.CreatePipelineRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[eventarc.CreatePipelineRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for create_pipeline + + Override in a subclass to manipulate the request or metadata + before they are sent to the Eventarc server. + """ + return request, metadata + + def post_create_pipeline(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + """Post-rpc interceptor for create_pipeline + + DEPRECATED. Please use the `post_create_pipeline_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the Eventarc server but before + it is returned to user code. This `post_create_pipeline` interceptor runs + before the `post_create_pipeline_with_metadata` interceptor. + """ + return response + + def post_create_pipeline_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_pipeline + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Eventarc server but before it is returned to user code. + + We recommend only using this `post_create_pipeline_with_metadata` + interceptor in new development instead of the `post_create_pipeline` interceptor. + When both interceptors are used, this `post_create_pipeline_with_metadata` interceptor runs after the + `post_create_pipeline` interceptor. The (possibly modified) response returned by + `post_create_pipeline` will be passed to + `post_create_pipeline_with_metadata`. + """ + return response, metadata + def pre_create_trigger(self, request: eventarc.CreateTriggerRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[eventarc.CreateTriggerRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for create_trigger @@ -417,115 +733,331 @@ def post_delete_channel_connection_with_metadata(self, response: operations_pb2. """ return response, metadata - def pre_delete_trigger(self, request: eventarc.DeleteTriggerRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[eventarc.DeleteTriggerRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for delete_trigger + def pre_delete_enrollment(self, request: eventarc.DeleteEnrollmentRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[eventarc.DeleteEnrollmentRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for delete_enrollment Override in a subclass to manipulate the request or metadata before they are sent to the Eventarc server. """ return request, metadata - def post_delete_trigger(self, response: operations_pb2.Operation) -> operations_pb2.Operation: - """Post-rpc interceptor for delete_trigger + def post_delete_enrollment(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + """Post-rpc interceptor for delete_enrollment - DEPRECATED. Please use the `post_delete_trigger_with_metadata` + DEPRECATED. Please use the `post_delete_enrollment_with_metadata` interceptor instead. Override in a subclass to read or manipulate the response after it is returned by the Eventarc server but before - it is returned to user code. This `post_delete_trigger` interceptor runs - before the `post_delete_trigger_with_metadata` interceptor. 
+ it is returned to user code. This `post_delete_enrollment` interceptor runs + before the `post_delete_enrollment_with_metadata` interceptor. """ return response - def post_delete_trigger_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for delete_trigger + def post_delete_enrollment_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_enrollment Override in a subclass to read or manipulate the response or metadata after it is returned by the Eventarc server but before it is returned to user code. - We recommend only using this `post_delete_trigger_with_metadata` - interceptor in new development instead of the `post_delete_trigger` interceptor. - When both interceptors are used, this `post_delete_trigger_with_metadata` interceptor runs after the - `post_delete_trigger` interceptor. The (possibly modified) response returned by - `post_delete_trigger` will be passed to - `post_delete_trigger_with_metadata`. + We recommend only using this `post_delete_enrollment_with_metadata` + interceptor in new development instead of the `post_delete_enrollment` interceptor. + When both interceptors are used, this `post_delete_enrollment_with_metadata` interceptor runs after the + `post_delete_enrollment` interceptor. The (possibly modified) response returned by + `post_delete_enrollment` will be passed to + `post_delete_enrollment_with_metadata`. """ return response, metadata - def pre_get_channel(self, request: eventarc.GetChannelRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[eventarc.GetChannelRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for get_channel + def pre_delete_google_api_source(self, request: eventarc.DeleteGoogleApiSourceRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[eventarc.DeleteGoogleApiSourceRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for delete_google_api_source Override in a subclass to manipulate the request or metadata before they are sent to the Eventarc server. """ return request, metadata - def post_get_channel(self, response: channel.Channel) -> channel.Channel: - """Post-rpc interceptor for get_channel + def post_delete_google_api_source(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + """Post-rpc interceptor for delete_google_api_source - DEPRECATED. Please use the `post_get_channel_with_metadata` + DEPRECATED. Please use the `post_delete_google_api_source_with_metadata` interceptor instead. Override in a subclass to read or manipulate the response after it is returned by the Eventarc server but before - it is returned to user code. This `post_get_channel` interceptor runs - before the `post_get_channel_with_metadata` interceptor. + it is returned to user code. This `post_delete_google_api_source` interceptor runs + before the `post_delete_google_api_source_with_metadata` interceptor. 
""" return response - def post_get_channel_with_metadata(self, response: channel.Channel, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[channel.Channel, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for get_channel + def post_delete_google_api_source_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_google_api_source Override in a subclass to read or manipulate the response or metadata after it is returned by the Eventarc server but before it is returned to user code. - We recommend only using this `post_get_channel_with_metadata` - interceptor in new development instead of the `post_get_channel` interceptor. - When both interceptors are used, this `post_get_channel_with_metadata` interceptor runs after the - `post_get_channel` interceptor. The (possibly modified) response returned by - `post_get_channel` will be passed to - `post_get_channel_with_metadata`. + We recommend only using this `post_delete_google_api_source_with_metadata` + interceptor in new development instead of the `post_delete_google_api_source` interceptor. + When both interceptors are used, this `post_delete_google_api_source_with_metadata` interceptor runs after the + `post_delete_google_api_source` interceptor. The (possibly modified) response returned by + `post_delete_google_api_source` will be passed to + `post_delete_google_api_source_with_metadata`. """ return response, metadata - def pre_get_channel_connection(self, request: eventarc.GetChannelConnectionRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[eventarc.GetChannelConnectionRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for get_channel_connection + def pre_delete_message_bus(self, request: eventarc.DeleteMessageBusRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[eventarc.DeleteMessageBusRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for delete_message_bus Override in a subclass to manipulate the request or metadata before they are sent to the Eventarc server. """ return request, metadata - def post_get_channel_connection(self, response: channel_connection.ChannelConnection) -> channel_connection.ChannelConnection: - """Post-rpc interceptor for get_channel_connection + def post_delete_message_bus(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + """Post-rpc interceptor for delete_message_bus - DEPRECATED. Please use the `post_get_channel_connection_with_metadata` + DEPRECATED. Please use the `post_delete_message_bus_with_metadata` interceptor instead. Override in a subclass to read or manipulate the response after it is returned by the Eventarc server but before - it is returned to user code. This `post_get_channel_connection` interceptor runs - before the `post_get_channel_connection_with_metadata` interceptor. + it is returned to user code. This `post_delete_message_bus` interceptor runs + before the `post_delete_message_bus_with_metadata` interceptor. 
""" return response - def post_get_channel_connection_with_metadata(self, response: channel_connection.ChannelConnection, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[channel_connection.ChannelConnection, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for get_channel_connection + def post_delete_message_bus_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_message_bus Override in a subclass to read or manipulate the response or metadata after it is returned by the Eventarc server but before it is returned to user code. - We recommend only using this `post_get_channel_connection_with_metadata` - interceptor in new development instead of the `post_get_channel_connection` interceptor. - When both interceptors are used, this `post_get_channel_connection_with_metadata` interceptor runs after the - `post_get_channel_connection` interceptor. The (possibly modified) response returned by - `post_get_channel_connection` will be passed to - `post_get_channel_connection_with_metadata`. + We recommend only using this `post_delete_message_bus_with_metadata` + interceptor in new development instead of the `post_delete_message_bus` interceptor. + When both interceptors are used, this `post_delete_message_bus_with_metadata` interceptor runs after the + `post_delete_message_bus` interceptor. The (possibly modified) response returned by + `post_delete_message_bus` will be passed to + `post_delete_message_bus_with_metadata`. """ return response, metadata - def pre_get_google_channel_config(self, request: eventarc.GetGoogleChannelConfigRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[eventarc.GetGoogleChannelConfigRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + def pre_delete_pipeline(self, request: eventarc.DeletePipelineRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[eventarc.DeletePipelineRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for delete_pipeline + + Override in a subclass to manipulate the request or metadata + before they are sent to the Eventarc server. + """ + return request, metadata + + def post_delete_pipeline(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + """Post-rpc interceptor for delete_pipeline + + DEPRECATED. Please use the `post_delete_pipeline_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the Eventarc server but before + it is returned to user code. This `post_delete_pipeline` interceptor runs + before the `post_delete_pipeline_with_metadata` interceptor. + """ + return response + + def post_delete_pipeline_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_pipeline + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Eventarc server but before it is returned to user code. + + We recommend only using this `post_delete_pipeline_with_metadata` + interceptor in new development instead of the `post_delete_pipeline` interceptor. + When both interceptors are used, this `post_delete_pipeline_with_metadata` interceptor runs after the + `post_delete_pipeline` interceptor. 
The (possibly modified) response returned by + `post_delete_pipeline` will be passed to + `post_delete_pipeline_with_metadata`. + """ + return response, metadata + + def pre_delete_trigger(self, request: eventarc.DeleteTriggerRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[eventarc.DeleteTriggerRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for delete_trigger + + Override in a subclass to manipulate the request or metadata + before they are sent to the Eventarc server. + """ + return request, metadata + + def post_delete_trigger(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + """Post-rpc interceptor for delete_trigger + + DEPRECATED. Please use the `post_delete_trigger_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the Eventarc server but before + it is returned to user code. This `post_delete_trigger` interceptor runs + before the `post_delete_trigger_with_metadata` interceptor. + """ + return response + + def post_delete_trigger_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_trigger + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Eventarc server but before it is returned to user code. + + We recommend only using this `post_delete_trigger_with_metadata` + interceptor in new development instead of the `post_delete_trigger` interceptor. + When both interceptors are used, this `post_delete_trigger_with_metadata` interceptor runs after the + `post_delete_trigger` interceptor. The (possibly modified) response returned by + `post_delete_trigger` will be passed to + `post_delete_trigger_with_metadata`. + """ + return response, metadata + + def pre_get_channel(self, request: eventarc.GetChannelRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[eventarc.GetChannelRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for get_channel + + Override in a subclass to manipulate the request or metadata + before they are sent to the Eventarc server. + """ + return request, metadata + + def post_get_channel(self, response: channel.Channel) -> channel.Channel: + """Post-rpc interceptor for get_channel + + DEPRECATED. Please use the `post_get_channel_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the Eventarc server but before + it is returned to user code. This `post_get_channel` interceptor runs + before the `post_get_channel_with_metadata` interceptor. + """ + return response + + def post_get_channel_with_metadata(self, response: channel.Channel, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[channel.Channel, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_channel + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Eventarc server but before it is returned to user code. + + We recommend only using this `post_get_channel_with_metadata` + interceptor in new development instead of the `post_get_channel` interceptor. + When both interceptors are used, this `post_get_channel_with_metadata` interceptor runs after the + `post_get_channel` interceptor. 
The (possibly modified) response returned by + `post_get_channel` will be passed to + `post_get_channel_with_metadata`. + """ + return response, metadata + + def pre_get_channel_connection(self, request: eventarc.GetChannelConnectionRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[eventarc.GetChannelConnectionRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for get_channel_connection + + Override in a subclass to manipulate the request or metadata + before they are sent to the Eventarc server. + """ + return request, metadata + + def post_get_channel_connection(self, response: channel_connection.ChannelConnection) -> channel_connection.ChannelConnection: + """Post-rpc interceptor for get_channel_connection + + DEPRECATED. Please use the `post_get_channel_connection_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the Eventarc server but before + it is returned to user code. This `post_get_channel_connection` interceptor runs + before the `post_get_channel_connection_with_metadata` interceptor. + """ + return response + + def post_get_channel_connection_with_metadata(self, response: channel_connection.ChannelConnection, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[channel_connection.ChannelConnection, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_channel_connection + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Eventarc server but before it is returned to user code. + + We recommend only using this `post_get_channel_connection_with_metadata` + interceptor in new development instead of the `post_get_channel_connection` interceptor. + When both interceptors are used, this `post_get_channel_connection_with_metadata` interceptor runs after the + `post_get_channel_connection` interceptor. The (possibly modified) response returned by + `post_get_channel_connection` will be passed to + `post_get_channel_connection_with_metadata`. + """ + return response, metadata + + def pre_get_enrollment(self, request: eventarc.GetEnrollmentRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[eventarc.GetEnrollmentRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for get_enrollment + + Override in a subclass to manipulate the request or metadata + before they are sent to the Eventarc server. + """ + return request, metadata + + def post_get_enrollment(self, response: enrollment.Enrollment) -> enrollment.Enrollment: + """Post-rpc interceptor for get_enrollment + + DEPRECATED. Please use the `post_get_enrollment_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the Eventarc server but before + it is returned to user code. This `post_get_enrollment` interceptor runs + before the `post_get_enrollment_with_metadata` interceptor. + """ + return response + + def post_get_enrollment_with_metadata(self, response: enrollment.Enrollment, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[enrollment.Enrollment, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_enrollment + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Eventarc server but before it is returned to user code. 
+ + We recommend only using this `post_get_enrollment_with_metadata` + interceptor in new development instead of the `post_get_enrollment` interceptor. + When both interceptors are used, this `post_get_enrollment_with_metadata` interceptor runs after the + `post_get_enrollment` interceptor. The (possibly modified) response returned by + `post_get_enrollment` will be passed to + `post_get_enrollment_with_metadata`. + """ + return response, metadata + + def pre_get_google_api_source(self, request: eventarc.GetGoogleApiSourceRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[eventarc.GetGoogleApiSourceRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for get_google_api_source + + Override in a subclass to manipulate the request or metadata + before they are sent to the Eventarc server. + """ + return request, metadata + + def post_get_google_api_source(self, response: google_api_source.GoogleApiSource) -> google_api_source.GoogleApiSource: + """Post-rpc interceptor for get_google_api_source + + DEPRECATED. Please use the `post_get_google_api_source_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the Eventarc server but before + it is returned to user code. This `post_get_google_api_source` interceptor runs + before the `post_get_google_api_source_with_metadata` interceptor. + """ + return response + + def post_get_google_api_source_with_metadata(self, response: google_api_source.GoogleApiSource, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[google_api_source.GoogleApiSource, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_google_api_source + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Eventarc server but before it is returned to user code. + + We recommend only using this `post_get_google_api_source_with_metadata` + interceptor in new development instead of the `post_get_google_api_source` interceptor. + When both interceptors are used, this `post_get_google_api_source_with_metadata` interceptor runs after the + `post_get_google_api_source` interceptor. The (possibly modified) response returned by + `post_get_google_api_source` will be passed to + `post_get_google_api_source_with_metadata`. + """ + return response, metadata + + def pre_get_google_channel_config(self, request: eventarc.GetGoogleChannelConfigRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[eventarc.GetGoogleChannelConfigRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for get_google_channel_config Override in a subclass to manipulate the request or metadata @@ -561,6 +1093,78 @@ def post_get_google_channel_config_with_metadata(self, response: google_channel_ """ return response, metadata + def pre_get_message_bus(self, request: eventarc.GetMessageBusRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[eventarc.GetMessageBusRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for get_message_bus + + Override in a subclass to manipulate the request or metadata + before they are sent to the Eventarc server. + """ + return request, metadata + + def post_get_message_bus(self, response: message_bus.MessageBus) -> message_bus.MessageBus: + """Post-rpc interceptor for get_message_bus + + DEPRECATED. Please use the `post_get_message_bus_with_metadata` + interceptor instead. 
+ + Override in a subclass to read or manipulate the response + after it is returned by the Eventarc server but before + it is returned to user code. This `post_get_message_bus` interceptor runs + before the `post_get_message_bus_with_metadata` interceptor. + """ + return response + + def post_get_message_bus_with_metadata(self, response: message_bus.MessageBus, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[message_bus.MessageBus, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_message_bus + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Eventarc server but before it is returned to user code. + + We recommend only using this `post_get_message_bus_with_metadata` + interceptor in new development instead of the `post_get_message_bus` interceptor. + When both interceptors are used, this `post_get_message_bus_with_metadata` interceptor runs after the + `post_get_message_bus` interceptor. The (possibly modified) response returned by + `post_get_message_bus` will be passed to + `post_get_message_bus_with_metadata`. + """ + return response, metadata + + def pre_get_pipeline(self, request: eventarc.GetPipelineRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[eventarc.GetPipelineRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for get_pipeline + + Override in a subclass to manipulate the request or metadata + before they are sent to the Eventarc server. + """ + return request, metadata + + def post_get_pipeline(self, response: pipeline.Pipeline) -> pipeline.Pipeline: + """Post-rpc interceptor for get_pipeline + + DEPRECATED. Please use the `post_get_pipeline_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the Eventarc server but before + it is returned to user code. This `post_get_pipeline` interceptor runs + before the `post_get_pipeline_with_metadata` interceptor. + """ + return response + + def post_get_pipeline_with_metadata(self, response: pipeline.Pipeline, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[pipeline.Pipeline, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_pipeline + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Eventarc server but before it is returned to user code. + + We recommend only using this `post_get_pipeline_with_metadata` + interceptor in new development instead of the `post_get_pipeline` interceptor. + When both interceptors are used, this `post_get_pipeline_with_metadata` interceptor runs after the + `post_get_pipeline` interceptor. The (possibly modified) response returned by + `post_get_pipeline` will be passed to + `post_get_pipeline_with_metadata`. 
+ """ + return response, metadata + def pre_get_provider(self, request: eventarc.GetProviderRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[eventarc.GetProviderRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for get_provider @@ -705,114 +1309,366 @@ def post_list_channels_with_metadata(self, response: eventarc.ListChannelsRespon """ return response, metadata - def pre_list_providers(self, request: eventarc.ListProvidersRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[eventarc.ListProvidersRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for list_providers + def pre_list_enrollments(self, request: eventarc.ListEnrollmentsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[eventarc.ListEnrollmentsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for list_enrollments Override in a subclass to manipulate the request or metadata before they are sent to the Eventarc server. """ return request, metadata - def post_list_providers(self, response: eventarc.ListProvidersResponse) -> eventarc.ListProvidersResponse: - """Post-rpc interceptor for list_providers + def post_list_enrollments(self, response: eventarc.ListEnrollmentsResponse) -> eventarc.ListEnrollmentsResponse: + """Post-rpc interceptor for list_enrollments - DEPRECATED. Please use the `post_list_providers_with_metadata` + DEPRECATED. Please use the `post_list_enrollments_with_metadata` interceptor instead. Override in a subclass to read or manipulate the response after it is returned by the Eventarc server but before - it is returned to user code. This `post_list_providers` interceptor runs - before the `post_list_providers_with_metadata` interceptor. + it is returned to user code. This `post_list_enrollments` interceptor runs + before the `post_list_enrollments_with_metadata` interceptor. """ return response - def post_list_providers_with_metadata(self, response: eventarc.ListProvidersResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[eventarc.ListProvidersResponse, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for list_providers + def post_list_enrollments_with_metadata(self, response: eventarc.ListEnrollmentsResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[eventarc.ListEnrollmentsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_enrollments Override in a subclass to read or manipulate the response or metadata after it is returned by the Eventarc server but before it is returned to user code. - We recommend only using this `post_list_providers_with_metadata` - interceptor in new development instead of the `post_list_providers` interceptor. - When both interceptors are used, this `post_list_providers_with_metadata` interceptor runs after the - `post_list_providers` interceptor. The (possibly modified) response returned by - `post_list_providers` will be passed to - `post_list_providers_with_metadata`. + We recommend only using this `post_list_enrollments_with_metadata` + interceptor in new development instead of the `post_list_enrollments` interceptor. + When both interceptors are used, this `post_list_enrollments_with_metadata` interceptor runs after the + `post_list_enrollments` interceptor. The (possibly modified) response returned by + `post_list_enrollments` will be passed to + `post_list_enrollments_with_metadata`. 
""" return response, metadata - def pre_list_triggers(self, request: eventarc.ListTriggersRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[eventarc.ListTriggersRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for list_triggers + def pre_list_google_api_sources(self, request: eventarc.ListGoogleApiSourcesRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[eventarc.ListGoogleApiSourcesRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for list_google_api_sources Override in a subclass to manipulate the request or metadata before they are sent to the Eventarc server. """ return request, metadata - def post_list_triggers(self, response: eventarc.ListTriggersResponse) -> eventarc.ListTriggersResponse: - """Post-rpc interceptor for list_triggers + def post_list_google_api_sources(self, response: eventarc.ListGoogleApiSourcesResponse) -> eventarc.ListGoogleApiSourcesResponse: + """Post-rpc interceptor for list_google_api_sources - DEPRECATED. Please use the `post_list_triggers_with_metadata` + DEPRECATED. Please use the `post_list_google_api_sources_with_metadata` interceptor instead. Override in a subclass to read or manipulate the response after it is returned by the Eventarc server but before - it is returned to user code. This `post_list_triggers` interceptor runs - before the `post_list_triggers_with_metadata` interceptor. + it is returned to user code. This `post_list_google_api_sources` interceptor runs + before the `post_list_google_api_sources_with_metadata` interceptor. """ return response - def post_list_triggers_with_metadata(self, response: eventarc.ListTriggersResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[eventarc.ListTriggersResponse, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for list_triggers + def post_list_google_api_sources_with_metadata(self, response: eventarc.ListGoogleApiSourcesResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[eventarc.ListGoogleApiSourcesResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_google_api_sources Override in a subclass to read or manipulate the response or metadata after it is returned by the Eventarc server but before it is returned to user code. - We recommend only using this `post_list_triggers_with_metadata` - interceptor in new development instead of the `post_list_triggers` interceptor. - When both interceptors are used, this `post_list_triggers_with_metadata` interceptor runs after the - `post_list_triggers` interceptor. The (possibly modified) response returned by - `post_list_triggers` will be passed to - `post_list_triggers_with_metadata`. + We recommend only using this `post_list_google_api_sources_with_metadata` + interceptor in new development instead of the `post_list_google_api_sources` interceptor. + When both interceptors are used, this `post_list_google_api_sources_with_metadata` interceptor runs after the + `post_list_google_api_sources` interceptor. The (possibly modified) response returned by + `post_list_google_api_sources` will be passed to + `post_list_google_api_sources_with_metadata`. 
""" return response, metadata - def pre_update_channel(self, request: eventarc.UpdateChannelRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[eventarc.UpdateChannelRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for update_channel + def pre_list_message_bus_enrollments(self, request: eventarc.ListMessageBusEnrollmentsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[eventarc.ListMessageBusEnrollmentsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for list_message_bus_enrollments Override in a subclass to manipulate the request or metadata before they are sent to the Eventarc server. """ return request, metadata - def post_update_channel(self, response: operations_pb2.Operation) -> operations_pb2.Operation: - """Post-rpc interceptor for update_channel + def post_list_message_bus_enrollments(self, response: eventarc.ListMessageBusEnrollmentsResponse) -> eventarc.ListMessageBusEnrollmentsResponse: + """Post-rpc interceptor for list_message_bus_enrollments - DEPRECATED. Please use the `post_update_channel_with_metadata` + DEPRECATED. Please use the `post_list_message_bus_enrollments_with_metadata` interceptor instead. Override in a subclass to read or manipulate the response after it is returned by the Eventarc server but before - it is returned to user code. This `post_update_channel` interceptor runs - before the `post_update_channel_with_metadata` interceptor. + it is returned to user code. This `post_list_message_bus_enrollments` interceptor runs + before the `post_list_message_bus_enrollments_with_metadata` interceptor. """ return response - def post_update_channel_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for update_channel + def post_list_message_bus_enrollments_with_metadata(self, response: eventarc.ListMessageBusEnrollmentsResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[eventarc.ListMessageBusEnrollmentsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_message_bus_enrollments Override in a subclass to read or manipulate the response or metadata after it is returned by the Eventarc server but before it is returned to user code. - We recommend only using this `post_update_channel_with_metadata` - interceptor in new development instead of the `post_update_channel` interceptor. - When both interceptors are used, this `post_update_channel_with_metadata` interceptor runs after the + We recommend only using this `post_list_message_bus_enrollments_with_metadata` + interceptor in new development instead of the `post_list_message_bus_enrollments` interceptor. + When both interceptors are used, this `post_list_message_bus_enrollments_with_metadata` interceptor runs after the + `post_list_message_bus_enrollments` interceptor. The (possibly modified) response returned by + `post_list_message_bus_enrollments` will be passed to + `post_list_message_bus_enrollments_with_metadata`. + """ + return response, metadata + + def pre_list_message_buses(self, request: eventarc.ListMessageBusesRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[eventarc.ListMessageBusesRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for list_message_buses + + Override in a subclass to manipulate the request or metadata + before they are sent to the Eventarc server. 
+ """ + return request, metadata + + def post_list_message_buses(self, response: eventarc.ListMessageBusesResponse) -> eventarc.ListMessageBusesResponse: + """Post-rpc interceptor for list_message_buses + + DEPRECATED. Please use the `post_list_message_buses_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the Eventarc server but before + it is returned to user code. This `post_list_message_buses` interceptor runs + before the `post_list_message_buses_with_metadata` interceptor. + """ + return response + + def post_list_message_buses_with_metadata(self, response: eventarc.ListMessageBusesResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[eventarc.ListMessageBusesResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_message_buses + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Eventarc server but before it is returned to user code. + + We recommend only using this `post_list_message_buses_with_metadata` + interceptor in new development instead of the `post_list_message_buses` interceptor. + When both interceptors are used, this `post_list_message_buses_with_metadata` interceptor runs after the + `post_list_message_buses` interceptor. The (possibly modified) response returned by + `post_list_message_buses` will be passed to + `post_list_message_buses_with_metadata`. + """ + return response, metadata + + def pre_list_pipelines(self, request: eventarc.ListPipelinesRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[eventarc.ListPipelinesRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for list_pipelines + + Override in a subclass to manipulate the request or metadata + before they are sent to the Eventarc server. + """ + return request, metadata + + def post_list_pipelines(self, response: eventarc.ListPipelinesResponse) -> eventarc.ListPipelinesResponse: + """Post-rpc interceptor for list_pipelines + + DEPRECATED. Please use the `post_list_pipelines_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the Eventarc server but before + it is returned to user code. This `post_list_pipelines` interceptor runs + before the `post_list_pipelines_with_metadata` interceptor. + """ + return response + + def post_list_pipelines_with_metadata(self, response: eventarc.ListPipelinesResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[eventarc.ListPipelinesResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_pipelines + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Eventarc server but before it is returned to user code. + + We recommend only using this `post_list_pipelines_with_metadata` + interceptor in new development instead of the `post_list_pipelines` interceptor. + When both interceptors are used, this `post_list_pipelines_with_metadata` interceptor runs after the + `post_list_pipelines` interceptor. The (possibly modified) response returned by + `post_list_pipelines` will be passed to + `post_list_pipelines_with_metadata`. 
+ """ + return response, metadata + + def pre_list_providers(self, request: eventarc.ListProvidersRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[eventarc.ListProvidersRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for list_providers + + Override in a subclass to manipulate the request or metadata + before they are sent to the Eventarc server. + """ + return request, metadata + + def post_list_providers(self, response: eventarc.ListProvidersResponse) -> eventarc.ListProvidersResponse: + """Post-rpc interceptor for list_providers + + DEPRECATED. Please use the `post_list_providers_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the Eventarc server but before + it is returned to user code. This `post_list_providers` interceptor runs + before the `post_list_providers_with_metadata` interceptor. + """ + return response + + def post_list_providers_with_metadata(self, response: eventarc.ListProvidersResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[eventarc.ListProvidersResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_providers + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Eventarc server but before it is returned to user code. + + We recommend only using this `post_list_providers_with_metadata` + interceptor in new development instead of the `post_list_providers` interceptor. + When both interceptors are used, this `post_list_providers_with_metadata` interceptor runs after the + `post_list_providers` interceptor. The (possibly modified) response returned by + `post_list_providers` will be passed to + `post_list_providers_with_metadata`. + """ + return response, metadata + + def pre_list_triggers(self, request: eventarc.ListTriggersRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[eventarc.ListTriggersRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for list_triggers + + Override in a subclass to manipulate the request or metadata + before they are sent to the Eventarc server. + """ + return request, metadata + + def post_list_triggers(self, response: eventarc.ListTriggersResponse) -> eventarc.ListTriggersResponse: + """Post-rpc interceptor for list_triggers + + DEPRECATED. Please use the `post_list_triggers_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the Eventarc server but before + it is returned to user code. This `post_list_triggers` interceptor runs + before the `post_list_triggers_with_metadata` interceptor. + """ + return response + + def post_list_triggers_with_metadata(self, response: eventarc.ListTriggersResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[eventarc.ListTriggersResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_triggers + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Eventarc server but before it is returned to user code. + + We recommend only using this `post_list_triggers_with_metadata` + interceptor in new development instead of the `post_list_triggers` interceptor. + When both interceptors are used, this `post_list_triggers_with_metadata` interceptor runs after the + `post_list_triggers` interceptor. 
The (possibly modified) response returned by + `post_list_triggers` will be passed to + `post_list_triggers_with_metadata`. + """ + return response, metadata + + def pre_update_channel(self, request: eventarc.UpdateChannelRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[eventarc.UpdateChannelRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for update_channel + + Override in a subclass to manipulate the request or metadata + before they are sent to the Eventarc server. + """ + return request, metadata + + def post_update_channel(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + """Post-rpc interceptor for update_channel + + DEPRECATED. Please use the `post_update_channel_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the Eventarc server but before + it is returned to user code. This `post_update_channel` interceptor runs + before the `post_update_channel_with_metadata` interceptor. + """ + return response + + def post_update_channel_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_channel + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Eventarc server but before it is returned to user code. + + We recommend only using this `post_update_channel_with_metadata` + interceptor in new development instead of the `post_update_channel` interceptor. + When both interceptors are used, this `post_update_channel_with_metadata` interceptor runs after the `post_update_channel` interceptor. The (possibly modified) response returned by `post_update_channel` will be passed to `post_update_channel_with_metadata`. """ return response, metadata + def pre_update_enrollment(self, request: eventarc.UpdateEnrollmentRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[eventarc.UpdateEnrollmentRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for update_enrollment + + Override in a subclass to manipulate the request or metadata + before they are sent to the Eventarc server. + """ + return request, metadata + + def post_update_enrollment(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + """Post-rpc interceptor for update_enrollment + + DEPRECATED. Please use the `post_update_enrollment_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the Eventarc server but before + it is returned to user code. This `post_update_enrollment` interceptor runs + before the `post_update_enrollment_with_metadata` interceptor. + """ + return response + + def post_update_enrollment_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_enrollment + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Eventarc server but before it is returned to user code. + + We recommend only using this `post_update_enrollment_with_metadata` + interceptor in new development instead of the `post_update_enrollment` interceptor. 
+ When both interceptors are used, this `post_update_enrollment_with_metadata` interceptor runs after the + `post_update_enrollment` interceptor. The (possibly modified) response returned by + `post_update_enrollment` will be passed to + `post_update_enrollment_with_metadata`. + """ + return response, metadata + + def pre_update_google_api_source(self, request: eventarc.UpdateGoogleApiSourceRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[eventarc.UpdateGoogleApiSourceRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for update_google_api_source + + Override in a subclass to manipulate the request or metadata + before they are sent to the Eventarc server. + """ + return request, metadata + + def post_update_google_api_source(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + """Post-rpc interceptor for update_google_api_source + + DEPRECATED. Please use the `post_update_google_api_source_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the Eventarc server but before + it is returned to user code. This `post_update_google_api_source` interceptor runs + before the `post_update_google_api_source_with_metadata` interceptor. + """ + return response + + def post_update_google_api_source_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_google_api_source + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Eventarc server but before it is returned to user code. + + We recommend only using this `post_update_google_api_source_with_metadata` + interceptor in new development instead of the `post_update_google_api_source` interceptor. + When both interceptors are used, this `post_update_google_api_source_with_metadata` interceptor runs after the + `post_update_google_api_source` interceptor. The (possibly modified) response returned by + `post_update_google_api_source` will be passed to + `post_update_google_api_source_with_metadata`. + """ + return response, metadata + def pre_update_google_channel_config(self, request: eventarc.UpdateGoogleChannelConfigRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[eventarc.UpdateGoogleChannelConfigRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for update_google_channel_config @@ -849,6 +1705,78 @@ def post_update_google_channel_config_with_metadata(self, response: gce_google_c """ return response, metadata + def pre_update_message_bus(self, request: eventarc.UpdateMessageBusRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[eventarc.UpdateMessageBusRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for update_message_bus + + Override in a subclass to manipulate the request or metadata + before they are sent to the Eventarc server. + """ + return request, metadata + + def post_update_message_bus(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + """Post-rpc interceptor for update_message_bus + + DEPRECATED. Please use the `post_update_message_bus_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the Eventarc server but before + it is returned to user code. 
This `post_update_message_bus` interceptor runs + before the `post_update_message_bus_with_metadata` interceptor. + """ + return response + + def post_update_message_bus_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_message_bus + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Eventarc server but before it is returned to user code. + + We recommend only using this `post_update_message_bus_with_metadata` + interceptor in new development instead of the `post_update_message_bus` interceptor. + When both interceptors are used, this `post_update_message_bus_with_metadata` interceptor runs after the + `post_update_message_bus` interceptor. The (possibly modified) response returned by + `post_update_message_bus` will be passed to + `post_update_message_bus_with_metadata`. + """ + return response, metadata + + def pre_update_pipeline(self, request: eventarc.UpdatePipelineRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[eventarc.UpdatePipelineRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for update_pipeline + + Override in a subclass to manipulate the request or metadata + before they are sent to the Eventarc server. + """ + return request, metadata + + def post_update_pipeline(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + """Post-rpc interceptor for update_pipeline + + DEPRECATED. Please use the `post_update_pipeline_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the Eventarc server but before + it is returned to user code. This `post_update_pipeline` interceptor runs + before the `post_update_pipeline_with_metadata` interceptor. + """ + return response + + def post_update_pipeline_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_pipeline + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Eventarc server but before it is returned to user code. + + We recommend only using this `post_update_pipeline_with_metadata` + interceptor in new development instead of the `post_update_pipeline` interceptor. + When both interceptors are used, this `post_update_pipeline_with_metadata` interceptor runs after the + `post_update_pipeline` interceptor. The (possibly modified) response returned by + `post_update_pipeline` will be passed to + `post_update_pipeline_with_metadata`. 
+ """ + return response, metadata + def pre_update_trigger(self, request: eventarc.UpdateTriggerRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[eventarc.UpdateTriggerRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for update_trigger @@ -1322,7 +2250,2602 @@ def __call__(self, resp, _ = self._interceptor.post_create_channel_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER try: - response_payload = json_format.MessageToJson(resp) + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.eventarc_v1.EventarcClient.create_channel_", + extra = { + "serviceName": "google.cloud.eventarc.v1.Eventarc", + "rpcName": "CreateChannel", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _CreateChannelConnection(_BaseEventarcRestTransport._BaseCreateChannelConnection, EventarcRestStub): + def __hash__(self): + return hash("EventarcRestTransport.CreateChannelConnection") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__(self, + request: eventarc.CreateChannelConnectionRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> operations_pb2.Operation: + r"""Call the create channel connection method over HTTP. + + Args: + request (~.eventarc.CreateChannelConnectionRequest): + The request object. The request message for the + CreateChannelConnection method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
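
These stubs are not normally invoked directly; user code reaches them through EventarcClient constructed over the REST transport. A hedged usage sketch follows: the resource names are made up, the request field names are assumed from the v1 API surface, and a real call needs credentials and an existing provider channel.

from google.cloud import eventarc_v1

client = eventarc_v1.EventarcClient(transport="rest")
operation = client.create_channel_connection(
    request=eventarc_v1.CreateChannelConnectionRequest(
        parent="projects/my-project/locations/us-central1",
        channel_connection_id="my-connection",
        channel_connection=eventarc_v1.ChannelConnection(
            channel="projects/provider-project/locations/us-central1/channels/my-channel",
        ),
    )
)
response = operation.result()  # blocks until the long-running Operation completes
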
+ + """ + + http_options = _BaseEventarcRestTransport._BaseCreateChannelConnection._get_http_options() + + request, metadata = self._interceptor.pre_create_channel_connection(request, metadata) + transcoded_request = _BaseEventarcRestTransport._BaseCreateChannelConnection._get_transcoded_request(http_options, request) + + body = _BaseEventarcRestTransport._BaseCreateChannelConnection._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseEventarcRestTransport._BaseCreateChannelConnection._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.eventarc_v1.EventarcClient.CreateChannelConnection", + extra = { + "serviceName": "google.cloud.eventarc.v1.Eventarc", + "rpcName": "CreateChannelConnection", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = EventarcRestTransport._CreateChannelConnection._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_create_channel_connection(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_channel_connection_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.eventarc_v1.EventarcClient.create_channel_connection", + extra = { + "serviceName": "google.cloud.eventarc.v1.Eventarc", + "rpcName": "CreateChannelConnection", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _CreateEnrollment(_BaseEventarcRestTransport._BaseCreateEnrollment, EventarcRestStub): + def __hash__(self): + return hash("EventarcRestTransport.CreateEnrollment") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__(self, + request: eventarc.CreateEnrollmentRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: 
Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> operations_pb2.Operation: + r"""Call the create enrollment method over HTTP. + + Args: + request (~.eventarc.CreateEnrollmentRequest): + The request object. The request message for the + CreateEnrollment method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. + + """ + + http_options = _BaseEventarcRestTransport._BaseCreateEnrollment._get_http_options() + + request, metadata = self._interceptor.pre_create_enrollment(request, metadata) + transcoded_request = _BaseEventarcRestTransport._BaseCreateEnrollment._get_transcoded_request(http_options, request) + + body = _BaseEventarcRestTransport._BaseCreateEnrollment._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseEventarcRestTransport._BaseCreateEnrollment._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.eventarc_v1.EventarcClient.CreateEnrollment", + extra = { + "serviceName": "google.cloud.eventarc.v1.Eventarc", + "rpcName": "CreateEnrollment", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = EventarcRestTransport._CreateEnrollment._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_create_enrollment(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_enrollment_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.eventarc_v1.EventarcClient.create_enrollment", + extra = { + "serviceName": "google.cloud.eventarc.v1.Eventarc", + "rpcName": "CreateEnrollment", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _CreateGoogleApiSource(_BaseEventarcRestTransport._BaseCreateGoogleApiSource, EventarcRestStub): + def __hash__(self): + return hash("EventarcRestTransport.CreateGoogleApiSource") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__(self, + request: eventarc.CreateGoogleApiSourceRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> operations_pb2.Operation: + r"""Call the create google api source method over HTTP. + + Args: + request (~.eventarc.CreateGoogleApiSourceRequest): + The request object. The request message for the + CreateGoogleApiSource method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
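
Status codes of 400 and above are turned into typed exceptions by core_exceptions.from_http_response, as in the check above. The snippet below is a local illustration of that mapping only; the hand-built requests.Response (and setting its private _content attribute) is just a test convenience, not something the transport does.

import requests
from google.api_core import exceptions as core_exceptions

fake = requests.Response()
fake.status_code = 404
fake._content = b'{"error": {"message": "enrollment not found", "status": "NOT_FOUND"}}'
fake.request = requests.Request("GET", "https://eventarc.googleapis.com/v1/example")

exc = core_exceptions.from_http_response(fake)
print(type(exc).__name__)  # NotFound
print(exc.message)         # includes the method, URL, and "enrollment not found"
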
+ + """ + + http_options = _BaseEventarcRestTransport._BaseCreateGoogleApiSource._get_http_options() + + request, metadata = self._interceptor.pre_create_google_api_source(request, metadata) + transcoded_request = _BaseEventarcRestTransport._BaseCreateGoogleApiSource._get_transcoded_request(http_options, request) + + body = _BaseEventarcRestTransport._BaseCreateGoogleApiSource._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseEventarcRestTransport._BaseCreateGoogleApiSource._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.eventarc_v1.EventarcClient.CreateGoogleApiSource", + extra = { + "serviceName": "google.cloud.eventarc.v1.Eventarc", + "rpcName": "CreateGoogleApiSource", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = EventarcRestTransport._CreateGoogleApiSource._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_create_google_api_source(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_google_api_source_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.eventarc_v1.EventarcClient.create_google_api_source", + extra = { + "serviceName": "google.cloud.eventarc.v1.Eventarc", + "rpcName": "CreateGoogleApiSource", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _CreateMessageBus(_BaseEventarcRestTransport._BaseCreateMessageBus, EventarcRestStub): + def __hash__(self): + return hash("EventarcRestTransport.CreateMessageBus") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__(self, + request: eventarc.CreateMessageBusRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, 
bytes]]]=(), + ) -> operations_pb2.Operation: + r"""Call the create message bus method over HTTP. + + Args: + request (~.eventarc.CreateMessageBusRequest): + The request object. The request message for the + CreateMessageBus method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. + + """ + + http_options = _BaseEventarcRestTransport._BaseCreateMessageBus._get_http_options() + + request, metadata = self._interceptor.pre_create_message_bus(request, metadata) + transcoded_request = _BaseEventarcRestTransport._BaseCreateMessageBus._get_transcoded_request(http_options, request) + + body = _BaseEventarcRestTransport._BaseCreateMessageBus._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseEventarcRestTransport._BaseCreateMessageBus._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.eventarc_v1.EventarcClient.CreateMessageBus", + extra = { + "serviceName": "google.cloud.eventarc.v1.Eventarc", + "rpcName": "CreateMessageBus", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = EventarcRestTransport._CreateMessageBus._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_create_message_bus(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_message_bus_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.eventarc_v1.EventarcClient.create_message_bus", + extra = { + "serviceName": "google.cloud.eventarc.v1.Eventarc", + "rpcName": "CreateMessageBus", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _CreatePipeline(_BaseEventarcRestTransport._BaseCreatePipeline, EventarcRestStub): + def __hash__(self): + return hash("EventarcRestTransport.CreatePipeline") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__(self, + request: eventarc.CreatePipelineRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> operations_pb2.Operation: + r"""Call the create pipeline method over HTTP. + + Args: + request (~.eventarc.CreatePipelineRequest): + The request object. The request message for the + CreatePipeline method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
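
On the success path above, the JSON body is parsed straight into an operations_pb2.Operation. A self-contained illustration of that json_format.Parse step, with an invented payload:

from google.longrunning import operations_pb2
from google.protobuf import json_format

resp = operations_pb2.Operation()
json_format.Parse(
    '{"name": "projects/p/locations/l/operations/op-123", "done": false}',
    resp,
    ignore_unknown_fields=True,
)
print(resp.name, resp.done)
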
+ + """ + + http_options = _BaseEventarcRestTransport._BaseCreatePipeline._get_http_options() + + request, metadata = self._interceptor.pre_create_pipeline(request, metadata) + transcoded_request = _BaseEventarcRestTransport._BaseCreatePipeline._get_transcoded_request(http_options, request) + + body = _BaseEventarcRestTransport._BaseCreatePipeline._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseEventarcRestTransport._BaseCreatePipeline._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.eventarc_v1.EventarcClient.CreatePipeline", + extra = { + "serviceName": "google.cloud.eventarc.v1.Eventarc", + "rpcName": "CreatePipeline", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = EventarcRestTransport._CreatePipeline._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_create_pipeline(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_pipeline_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.eventarc_v1.EventarcClient.create_pipeline", + extra = { + "serviceName": "google.cloud.eventarc.v1.Eventarc", + "rpcName": "CreatePipeline", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _CreateTrigger(_BaseEventarcRestTransport._BaseCreateTrigger, EventarcRestStub): + def __hash__(self): + return hash("EventarcRestTransport.CreateTrigger") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__(self, + request: eventarc.CreateTriggerRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> operations_pb2.Operation: + r"""Call the create trigger method over HTTP. 
+ + Args: + request (~.eventarc.CreateTriggerRequest): + The request object. The request message for the + CreateTrigger method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. + + """ + + http_options = _BaseEventarcRestTransport._BaseCreateTrigger._get_http_options() + + request, metadata = self._interceptor.pre_create_trigger(request, metadata) + transcoded_request = _BaseEventarcRestTransport._BaseCreateTrigger._get_transcoded_request(http_options, request) + + body = _BaseEventarcRestTransport._BaseCreateTrigger._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseEventarcRestTransport._BaseCreateTrigger._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.eventarc_v1.EventarcClient.CreateTrigger", + extra = { + "serviceName": "google.cloud.eventarc.v1.Eventarc", + "rpcName": "CreateTrigger", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = EventarcRestTransport._CreateTrigger._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_create_trigger(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_trigger_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.eventarc_v1.EventarcClient.create_trigger", + extra = { + "serviceName": "google.cloud.eventarc.v1.Eventarc", + "rpcName": "CreateTrigger", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _DeleteChannel(_BaseEventarcRestTransport._BaseDeleteChannel, EventarcRestStub): + def __hash__(self): + return hash("EventarcRestTransport.DeleteChannel") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: eventarc.DeleteChannelRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> operations_pb2.Operation: + r"""Call the delete channel method over HTTP. + + Args: + request (~.eventarc.DeleteChannelRequest): + The request object. The request message for the + DeleteChannel method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
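
The "Sending request ..." / "Received response ..." records above only fire when the installed google-api-core supports client logging and DEBUG is enabled for this module's logger. A minimal sketch for turning that on; the logger name is an assumption based on the usual __name__ convention for _LOGGER.

import logging

logging.basicConfig(level=logging.DEBUG)
# Assumed logger name; _LOGGER in this module is expected to use __name__.
logging.getLogger(
    "google.cloud.eventarc_v1.services.eventarc.transports.rest"
).setLevel(logging.DEBUG)
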
+ + """ + + http_options = _BaseEventarcRestTransport._BaseDeleteChannel._get_http_options() + + request, metadata = self._interceptor.pre_delete_channel(request, metadata) + transcoded_request = _BaseEventarcRestTransport._BaseDeleteChannel._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseEventarcRestTransport._BaseDeleteChannel._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.eventarc_v1.EventarcClient.DeleteChannel", + extra = { + "serviceName": "google.cloud.eventarc.v1.Eventarc", + "rpcName": "DeleteChannel", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = EventarcRestTransport._DeleteChannel._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_delete_channel(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_channel_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.eventarc_v1.EventarcClient.delete_channel", + extra = { + "serviceName": "google.cloud.eventarc.v1.Eventarc", + "rpcName": "DeleteChannel", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _DeleteChannelConnection(_BaseEventarcRestTransport._BaseDeleteChannelConnection, EventarcRestStub): + def __hash__(self): + return hash("EventarcRestTransport.DeleteChannelConnection") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: eventarc.DeleteChannelConnectionRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> operations_pb2.Operation: + r"""Call the delete channel connection method over HTTP. + + Args: + request (~.eventarc.DeleteChannelConnectionRequest): + The request object. 
The request message for the + DeleteChannelConnection method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. + + """ + + http_options = _BaseEventarcRestTransport._BaseDeleteChannelConnection._get_http_options() + + request, metadata = self._interceptor.pre_delete_channel_connection(request, metadata) + transcoded_request = _BaseEventarcRestTransport._BaseDeleteChannelConnection._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseEventarcRestTransport._BaseDeleteChannelConnection._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.eventarc_v1.EventarcClient.DeleteChannelConnection", + extra = { + "serviceName": "google.cloud.eventarc.v1.Eventarc", + "rpcName": "DeleteChannelConnection", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = EventarcRestTransport._DeleteChannelConnection._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_delete_channel_connection(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_channel_connection_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.eventarc_v1.EventarcClient.delete_channel_connection", + extra = { + "serviceName": "google.cloud.eventarc.v1.Eventarc", + "rpcName": "DeleteChannelConnection", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _DeleteEnrollment(_BaseEventarcRestTransport._BaseDeleteEnrollment, EventarcRestStub): + def __hash__(self): + return hash("EventarcRestTransport.DeleteEnrollment") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: eventarc.DeleteEnrollmentRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> operations_pb2.Operation: + r"""Call the delete enrollment method over HTTP. + + Args: + request (~.eventarc.DeleteEnrollmentRequest): + The request object. The request message for the + DeleteEnrollment method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
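
The metadata parameter described in these docstrings is just a sequence of key/value tuples; as the docstrings note, keys ending in "-bin" carry bytes rather than str. For example (the header names are illustrative only):

metadata = (
    ("x-goog-request-params", "name=projects/p/locations/l/enrollments/e"),
    ("x-debug-trace-bin", b"\x01\x02\x03"),  # "-bin" suffix => bytes value
)
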
+ + """ + + http_options = _BaseEventarcRestTransport._BaseDeleteEnrollment._get_http_options() + + request, metadata = self._interceptor.pre_delete_enrollment(request, metadata) + transcoded_request = _BaseEventarcRestTransport._BaseDeleteEnrollment._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseEventarcRestTransport._BaseDeleteEnrollment._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.eventarc_v1.EventarcClient.DeleteEnrollment", + extra = { + "serviceName": "google.cloud.eventarc.v1.Eventarc", + "rpcName": "DeleteEnrollment", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = EventarcRestTransport._DeleteEnrollment._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_delete_enrollment(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_enrollment_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.eventarc_v1.EventarcClient.delete_enrollment", + extra = { + "serviceName": "google.cloud.eventarc.v1.Eventarc", + "rpcName": "DeleteEnrollment", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _DeleteGoogleApiSource(_BaseEventarcRestTransport._BaseDeleteGoogleApiSource, EventarcRestStub): + def __hash__(self): + return hash("EventarcRestTransport.DeleteGoogleApiSource") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: eventarc.DeleteGoogleApiSourceRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> operations_pb2.Operation: + r"""Call the delete google api source method over HTTP. 
+ + Args: + request (~.eventarc.DeleteGoogleApiSourceRequest): + The request object. The request message for the + DeleteGoogleApiSource method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. + + """ + + http_options = _BaseEventarcRestTransport._BaseDeleteGoogleApiSource._get_http_options() + + request, metadata = self._interceptor.pre_delete_google_api_source(request, metadata) + transcoded_request = _BaseEventarcRestTransport._BaseDeleteGoogleApiSource._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseEventarcRestTransport._BaseDeleteGoogleApiSource._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.eventarc_v1.EventarcClient.DeleteGoogleApiSource", + extra = { + "serviceName": "google.cloud.eventarc.v1.Eventarc", + "rpcName": "DeleteGoogleApiSource", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = EventarcRestTransport._DeleteGoogleApiSource._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_delete_google_api_source(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_google_api_source_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.eventarc_v1.EventarcClient.delete_google_api_source", + extra = { + "serviceName": "google.cloud.eventarc.v1.Eventarc", + "rpcName": "DeleteGoogleApiSource", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _DeleteMessageBus(_BaseEventarcRestTransport._BaseDeleteMessageBus, EventarcRestStub): + def __hash__(self): + return hash("EventarcRestTransport.DeleteMessageBus") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: eventarc.DeleteMessageBusRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> operations_pb2.Operation: + r"""Call the delete message bus method over HTTP. + + Args: + request (~.eventarc.DeleteMessageBusRequest): + The request object. The request message for the + DeleteMessageBus method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
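
After a successful call, each stub converts the HTTP response headers into (key, str(value)) pairs and passes them to the matching *_with_metadata interceptor, as shown above. A sketch of reading one of those headers in an override; the base-class import path and the header of interest are assumptions.

from google.cloud.eventarc_v1.services.eventarc.transports.rest import EventarcRestInterceptor


class HeaderAwareInterceptor(EventarcRestInterceptor):
    def post_delete_message_bus_with_metadata(self, response, metadata):
        headers = dict(metadata)  # the [(header, value), ...] list built by the stub
        request_id = headers.get("x-goog-request-id")  # hypothetical header of interest
        if request_id:
            print("DeleteMessageBus request id:", request_id)
        return response, metadata
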
+ + """ + + http_options = _BaseEventarcRestTransport._BaseDeleteMessageBus._get_http_options() + + request, metadata = self._interceptor.pre_delete_message_bus(request, metadata) + transcoded_request = _BaseEventarcRestTransport._BaseDeleteMessageBus._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseEventarcRestTransport._BaseDeleteMessageBus._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.eventarc_v1.EventarcClient.DeleteMessageBus", + extra = { + "serviceName": "google.cloud.eventarc.v1.Eventarc", + "rpcName": "DeleteMessageBus", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = EventarcRestTransport._DeleteMessageBus._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_delete_message_bus(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_message_bus_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.eventarc_v1.EventarcClient.delete_message_bus", + extra = { + "serviceName": "google.cloud.eventarc.v1.Eventarc", + "rpcName": "DeleteMessageBus", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _DeletePipeline(_BaseEventarcRestTransport._BaseDeletePipeline, EventarcRestStub): + def __hash__(self): + return hash("EventarcRestTransport.DeletePipeline") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: eventarc.DeletePipelineRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> operations_pb2.Operation: + r"""Call the delete pipeline method over HTTP. + + Args: + request (~.eventarc.DeletePipelineRequest): + The request object. 
The request message for the + DeletePipeline method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. + + """ + + http_options = _BaseEventarcRestTransport._BaseDeletePipeline._get_http_options() + + request, metadata = self._interceptor.pre_delete_pipeline(request, metadata) + transcoded_request = _BaseEventarcRestTransport._BaseDeletePipeline._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseEventarcRestTransport._BaseDeletePipeline._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.eventarc_v1.EventarcClient.DeletePipeline", + extra = { + "serviceName": "google.cloud.eventarc.v1.Eventarc", + "rpcName": "DeletePipeline", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = EventarcRestTransport._DeletePipeline._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_delete_pipeline(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_pipeline_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.eventarc_v1.EventarcClient.delete_pipeline", + extra = { + "serviceName": "google.cloud.eventarc.v1.Eventarc", + "rpcName": "DeletePipeline", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _DeleteTrigger(_BaseEventarcRestTransport._BaseDeleteTrigger, EventarcRestStub): + def __hash__(self): + return hash("EventarcRestTransport.DeleteTrigger") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: eventarc.DeleteTriggerRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> operations_pb2.Operation: + r"""Call the delete trigger method over HTTP. + + Args: + request (~.eventarc.DeleteTriggerRequest): + The request object. The request message for the + DeleteTrigger method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
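
Every _get_response helper above flattens the JSON query params with rest_helpers.flatten_query_params(..., strict=True), which turns nested fields into dotted, string-valued query parameters. A stand-alone illustration with an invented input dict:

from google.api_core import rest_helpers

params = rest_helpers.flatten_query_params(
    {"validateOnly": True, "updateMask": {"paths": ["display_name"]}},
    strict=True,
)
print(params)  # [('validateOnly', 'true'), ('updateMask.paths', 'display_name')]
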
+ + """ + + http_options = _BaseEventarcRestTransport._BaseDeleteTrigger._get_http_options() + + request, metadata = self._interceptor.pre_delete_trigger(request, metadata) + transcoded_request = _BaseEventarcRestTransport._BaseDeleteTrigger._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseEventarcRestTransport._BaseDeleteTrigger._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.eventarc_v1.EventarcClient.DeleteTrigger", + extra = { + "serviceName": "google.cloud.eventarc.v1.Eventarc", + "rpcName": "DeleteTrigger", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = EventarcRestTransport._DeleteTrigger._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_delete_trigger(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_trigger_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.eventarc_v1.EventarcClient.delete_trigger", + extra = { + "serviceName": "google.cloud.eventarc.v1.Eventarc", + "rpcName": "DeleteTrigger", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _GetChannel(_BaseEventarcRestTransport._BaseGetChannel, EventarcRestStub): + def __hash__(self): + return hash("EventarcRestTransport.GetChannel") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: eventarc.GetChannelRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> channel.Channel: + r"""Call the get channel method over HTTP. + + Args: + request (~.eventarc.GetChannelRequest): + The request object. The request message for the + GetChannel method. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.channel.Channel: + A representation of the Channel + resource. A Channel is a resource on + which event providers publish their + events. The published events are + delivered through the transport + associated with the channel. Note that a + channel is associated with exactly one + event provider. + + """ + + http_options = _BaseEventarcRestTransport._BaseGetChannel._get_http_options() + + request, metadata = self._interceptor.pre_get_channel(request, metadata) + transcoded_request = _BaseEventarcRestTransport._BaseGetChannel._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseEventarcRestTransport._BaseGetChannel._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.eventarc_v1.EventarcClient.GetChannel", + extra = { + "serviceName": "google.cloud.eventarc.v1.Eventarc", + "rpcName": "GetChannel", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = EventarcRestTransport._GetChannel._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = channel.Channel() + pb_resp = channel.Channel.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_get_channel(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_channel_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = channel.Channel.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.eventarc_v1.EventarcClient.get_channel", + extra = { + "serviceName": "google.cloud.eventarc.v1.Eventarc", + "rpcName": "GetChannel", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _GetChannelConnection(_BaseEventarcRestTransport._BaseGetChannelConnection, EventarcRestStub): + def __hash__(self): + return hash("EventarcRestTransport.GetChannelConnection") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: eventarc.GetChannelConnectionRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> channel_connection.ChannelConnection: + r"""Call the get channel connection method over HTTP. + + Args: + request (~.eventarc.GetChannelConnectionRequest): + The request object. The request message for the + GetChannelConnection method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.channel_connection.ChannelConnection: + A representation of the + ChannelConnection resource. A + ChannelConnection is a resource which + event providers create during the + activation process to establish a + connection between the provider and the + subscriber channel. 
+ + """ + + http_options = _BaseEventarcRestTransport._BaseGetChannelConnection._get_http_options() + + request, metadata = self._interceptor.pre_get_channel_connection(request, metadata) + transcoded_request = _BaseEventarcRestTransport._BaseGetChannelConnection._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseEventarcRestTransport._BaseGetChannelConnection._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.eventarc_v1.EventarcClient.GetChannelConnection", + extra = { + "serviceName": "google.cloud.eventarc.v1.Eventarc", + "rpcName": "GetChannelConnection", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = EventarcRestTransport._GetChannelConnection._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = channel_connection.ChannelConnection() + pb_resp = channel_connection.ChannelConnection.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_get_channel_connection(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_channel_connection_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = channel_connection.ChannelConnection.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.eventarc_v1.EventarcClient.get_channel_connection", + extra = { + "serviceName": "google.cloud.eventarc.v1.Eventarc", + "rpcName": "GetChannelConnection", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _GetEnrollment(_BaseEventarcRestTransport._BaseGetEnrollment, EventarcRestStub): + def __hash__(self): + return hash("EventarcRestTransport.GetEnrollment") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: eventarc.GetEnrollmentRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> enrollment.Enrollment: + r"""Call the get 
enrollment method over HTTP. + + Args: + request (~.eventarc.GetEnrollmentRequest): + The request object. The request message for the + GetEnrollment method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.enrollment.Enrollment: + An enrollment represents a + subscription for messages on a + particular message bus. It defines a + matching criteria for messages on the + bus and the subscriber endpoint where + matched messages should be delivered. + + """ + + http_options = _BaseEventarcRestTransport._BaseGetEnrollment._get_http_options() + + request, metadata = self._interceptor.pre_get_enrollment(request, metadata) + transcoded_request = _BaseEventarcRestTransport._BaseGetEnrollment._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseEventarcRestTransport._BaseGetEnrollment._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.eventarc_v1.EventarcClient.GetEnrollment", + extra = { + "serviceName": "google.cloud.eventarc.v1.Eventarc", + "rpcName": "GetEnrollment", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = EventarcRestTransport._GetEnrollment._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = enrollment.Enrollment() + pb_resp = enrollment.Enrollment.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_get_enrollment(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_enrollment_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = enrollment.Enrollment.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.eventarc_v1.EventarcClient.get_enrollment", + extra = { + "serviceName": "google.cloud.eventarc.v1.Eventarc", + "rpcName": "GetEnrollment", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _GetGoogleApiSource(_BaseEventarcRestTransport._BaseGetGoogleApiSource, EventarcRestStub): + def __hash__(self): + return hash("EventarcRestTransport.GetGoogleApiSource") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: eventarc.GetGoogleApiSourceRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> google_api_source.GoogleApiSource: + r"""Call the get google api source method over HTTP. + + Args: + request (~.eventarc.GetGoogleApiSourceRequest): + The request object. The request message for the + GetGoogleApiSource method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.google_api_source.GoogleApiSource: + A GoogleApiSource represents a + subscription of 1P events from a + MessageBus. 
+ + """ + + http_options = _BaseEventarcRestTransport._BaseGetGoogleApiSource._get_http_options() + + request, metadata = self._interceptor.pre_get_google_api_source(request, metadata) + transcoded_request = _BaseEventarcRestTransport._BaseGetGoogleApiSource._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseEventarcRestTransport._BaseGetGoogleApiSource._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.eventarc_v1.EventarcClient.GetGoogleApiSource", + extra = { + "serviceName": "google.cloud.eventarc.v1.Eventarc", + "rpcName": "GetGoogleApiSource", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = EventarcRestTransport._GetGoogleApiSource._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = google_api_source.GoogleApiSource() + pb_resp = google_api_source.GoogleApiSource.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_get_google_api_source(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_google_api_source_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = google_api_source.GoogleApiSource.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.eventarc_v1.EventarcClient.get_google_api_source", + extra = { + "serviceName": "google.cloud.eventarc.v1.Eventarc", + "rpcName": "GetGoogleApiSource", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _GetGoogleChannelConfig(_BaseEventarcRestTransport._BaseGetGoogleChannelConfig, EventarcRestStub): + def __hash__(self): + return hash("EventarcRestTransport.GetGoogleChannelConfig") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: eventarc.GetGoogleChannelConfigRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> 
google_channel_config.GoogleChannelConfig: + r"""Call the get google channel config method over HTTP. + + Args: + request (~.eventarc.GetGoogleChannelConfigRequest): + The request object. The request message for the + GetGoogleChannelConfig method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.google_channel_config.GoogleChannelConfig: + A GoogleChannelConfig is a resource + that stores the custom settings + respected by Eventarc first-party + triggers in the matching region. Once + configured, first-party event data will + be protected using the specified custom + managed encryption key instead of + Google-managed encryption keys. + + """ + + http_options = _BaseEventarcRestTransport._BaseGetGoogleChannelConfig._get_http_options() + + request, metadata = self._interceptor.pre_get_google_channel_config(request, metadata) + transcoded_request = _BaseEventarcRestTransport._BaseGetGoogleChannelConfig._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseEventarcRestTransport._BaseGetGoogleChannelConfig._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.eventarc_v1.EventarcClient.GetGoogleChannelConfig", + extra = { + "serviceName": "google.cloud.eventarc.v1.Eventarc", + "rpcName": "GetGoogleChannelConfig", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = EventarcRestTransport._GetGoogleChannelConfig._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = google_channel_config.GoogleChannelConfig() + pb_resp = google_channel_config.GoogleChannelConfig.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_get_google_channel_config(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_google_channel_config_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = google_channel_config.GoogleChannelConfig.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.eventarc_v1.EventarcClient.get_google_channel_config", + extra = { + "serviceName": "google.cloud.eventarc.v1.Eventarc", + "rpcName": "GetGoogleChannelConfig", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _GetMessageBus(_BaseEventarcRestTransport._BaseGetMessageBus, EventarcRestStub): + def __hash__(self): + return hash("EventarcRestTransport.GetMessageBus") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: eventarc.GetMessageBusRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> message_bus.MessageBus: + r"""Call the get message bus method over HTTP. + + Args: + request (~.eventarc.GetMessageBusRequest): + The request object. The request message for the + GetMessageBus method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.message_bus.MessageBus: + MessageBus for the messages flowing + through the system. The admin has + visibility and control over the messages + being published and consumed and can + restrict publishers and subscribers to + only a subset of data available in the + system by defining authorization + policies. 
+ + """ + + http_options = _BaseEventarcRestTransport._BaseGetMessageBus._get_http_options() + + request, metadata = self._interceptor.pre_get_message_bus(request, metadata) + transcoded_request = _BaseEventarcRestTransport._BaseGetMessageBus._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseEventarcRestTransport._BaseGetMessageBus._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.eventarc_v1.EventarcClient.GetMessageBus", + extra = { + "serviceName": "google.cloud.eventarc.v1.Eventarc", + "rpcName": "GetMessageBus", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = EventarcRestTransport._GetMessageBus._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = message_bus.MessageBus() + pb_resp = message_bus.MessageBus.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_get_message_bus(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_message_bus_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = message_bus.MessageBus.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.eventarc_v1.EventarcClient.get_message_bus", + extra = { + "serviceName": "google.cloud.eventarc.v1.Eventarc", + "rpcName": "GetMessageBus", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _GetPipeline(_BaseEventarcRestTransport._BaseGetPipeline, EventarcRestStub): + def __hash__(self): + return hash("EventarcRestTransport.GetPipeline") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: eventarc.GetPipelineRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> pipeline.Pipeline: + r"""Call the get pipeline method over HTTP. + + Args: + request (~.eventarc.GetPipelineRequest): + The request object. 
The request message for the + GetPipeline method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.pipeline.Pipeline: + A representation of the Pipeline + resource. + + """ + + http_options = _BaseEventarcRestTransport._BaseGetPipeline._get_http_options() + + request, metadata = self._interceptor.pre_get_pipeline(request, metadata) + transcoded_request = _BaseEventarcRestTransport._BaseGetPipeline._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseEventarcRestTransport._BaseGetPipeline._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.eventarc_v1.EventarcClient.GetPipeline", + extra = { + "serviceName": "google.cloud.eventarc.v1.Eventarc", + "rpcName": "GetPipeline", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = EventarcRestTransport._GetPipeline._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = pipeline.Pipeline() + pb_resp = pipeline.Pipeline.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_get_pipeline(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_pipeline_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = pipeline.Pipeline.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.eventarc_v1.EventarcClient.get_pipeline", + extra = { + "serviceName": "google.cloud.eventarc.v1.Eventarc", + "rpcName": "GetPipeline", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _GetProvider(_BaseEventarcRestTransport._BaseGetProvider, EventarcRestStub): + def __hash__(self): + return hash("EventarcRestTransport.GetProvider") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: eventarc.GetProviderRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> discovery.Provider: + r"""Call the get provider method over HTTP. + + Args: + request (~.eventarc.GetProviderRequest): + The request object. The request message for the + GetProvider method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.discovery.Provider: + A representation of the Provider + resource. 
+ + """ + + http_options = _BaseEventarcRestTransport._BaseGetProvider._get_http_options() + + request, metadata = self._interceptor.pre_get_provider(request, metadata) + transcoded_request = _BaseEventarcRestTransport._BaseGetProvider._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseEventarcRestTransport._BaseGetProvider._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.eventarc_v1.EventarcClient.GetProvider", + extra = { + "serviceName": "google.cloud.eventarc.v1.Eventarc", + "rpcName": "GetProvider", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = EventarcRestTransport._GetProvider._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = discovery.Provider() + pb_resp = discovery.Provider.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_get_provider(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_provider_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = discovery.Provider.to_json(response) except: response_payload = None http_response = { @@ -1331,19 +4854,19 @@ def __call__(self, "status": response.status_code, } _LOGGER.debug( - "Received response for google.cloud.eventarc_v1.EventarcClient.create_channel_", + "Received response for google.cloud.eventarc_v1.EventarcClient.get_provider", extra = { "serviceName": "google.cloud.eventarc.v1.Eventarc", - "rpcName": "CreateChannel", + "rpcName": "GetProvider", "metadata": http_response["headers"], "httpResponse": http_response, }, ) return resp - class _CreateChannelConnection(_BaseEventarcRestTransport._BaseCreateChannelConnection, EventarcRestStub): + class _GetTrigger(_BaseEventarcRestTransport._BaseGetTrigger, EventarcRestStub): def __hash__(self): - return hash("EventarcRestTransport.CreateChannelConnection") + return hash("EventarcRestTransport.GetTrigger") @staticmethod def _get_response( @@ -1364,22 +4887,21 @@ def _get_response( timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, ) return response def __call__(self, - request: eventarc.CreateChannelConnectionRequest, *, + request: eventarc.GetTriggerRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: Optional[float]=None, metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: - r"""Call the create channel connection method over HTTP. + ) -> trigger.Trigger: + r"""Call the get trigger method over HTTP. 
Args: - request (~.eventarc.CreateChannelConnectionRequest): + request (~.eventarc.GetTriggerRequest): The request object. The request message for the - CreateChannelConnection method. + GetTrigger method. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1389,28 +4911,25 @@ def __call__(self, be of type `bytes`. Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. + ~.trigger.Trigger: + A representation of the trigger + resource. """ - http_options = _BaseEventarcRestTransport._BaseCreateChannelConnection._get_http_options() - - request, metadata = self._interceptor.pre_create_channel_connection(request, metadata) - transcoded_request = _BaseEventarcRestTransport._BaseCreateChannelConnection._get_transcoded_request(http_options, request) + http_options = _BaseEventarcRestTransport._BaseGetTrigger._get_http_options() - body = _BaseEventarcRestTransport._BaseCreateChannelConnection._get_request_body_json(transcoded_request) + request, metadata = self._interceptor.pre_get_trigger(request, metadata) + transcoded_request = _BaseEventarcRestTransport._BaseGetTrigger._get_transcoded_request(http_options, request) # Jsonify the query params - query_params = _BaseEventarcRestTransport._BaseCreateChannelConnection._get_query_params_json(transcoded_request) + query_params = _BaseEventarcRestTransport._BaseGetTrigger._get_query_params_json(transcoded_request) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] try: - request_payload = json_format.MessageToJson(request) + request_payload = type(request).to_json(request) except: request_payload = None http_request = { @@ -1420,17 +4939,17 @@ def __call__(self, "headers": dict(metadata), } _LOGGER.debug( - f"Sending request for google.cloud.eventarc_v1.EventarcClient.CreateChannelConnection", + f"Sending request for google.cloud.eventarc_v1.EventarcClient.GetTrigger", extra = { "serviceName": "google.cloud.eventarc.v1.Eventarc", - "rpcName": "CreateChannelConnection", + "rpcName": "GetTrigger", "httpRequest": http_request, "metadata": http_request["headers"], }, ) # Send the request - response = EventarcRestTransport._CreateChannelConnection._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + response = EventarcRestTransport._GetTrigger._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
@@ -1438,15 +4957,17 @@ def __call__(self, raise core_exceptions.from_http_response(response) # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = trigger.Trigger() + pb_resp = trigger.Trigger.pb(resp) - resp = self._interceptor.post_create_channel_connection(resp) + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_get_trigger(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_create_channel_connection_with_metadata(resp, response_metadata) + resp, _ = self._interceptor.post_get_trigger_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER try: - response_payload = json_format.MessageToJson(resp) + response_payload = trigger.Trigger.to_json(response) except: response_payload = None http_response = { @@ -1455,19 +4976,19 @@ def __call__(self, "status": response.status_code, } _LOGGER.debug( - "Received response for google.cloud.eventarc_v1.EventarcClient.create_channel_connection", + "Received response for google.cloud.eventarc_v1.EventarcClient.get_trigger", extra = { "serviceName": "google.cloud.eventarc.v1.Eventarc", - "rpcName": "CreateChannelConnection", + "rpcName": "GetTrigger", "metadata": http_response["headers"], "httpResponse": http_response, }, ) return resp - class _CreateTrigger(_BaseEventarcRestTransport._BaseCreateTrigger, EventarcRestStub): + class _ListChannelConnections(_BaseEventarcRestTransport._BaseListChannelConnections, EventarcRestStub): def __hash__(self): - return hash("EventarcRestTransport.CreateTrigger") + return hash("EventarcRestTransport.ListChannelConnections") @staticmethod def _get_response( @@ -1488,22 +5009,21 @@ def _get_response( timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, ) return response def __call__(self, - request: eventarc.CreateTriggerRequest, *, + request: eventarc.ListChannelConnectionsRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: Optional[float]=None, metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: - r"""Call the create trigger method over HTTP. + ) -> eventarc.ListChannelConnectionsResponse: + r"""Call the list channel connections method over HTTP. Args: - request (~.eventarc.CreateTriggerRequest): + request (~.eventarc.ListChannelConnectionsRequest): The request object. The request message for the - CreateTrigger method. + ListChannelConnections method. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1513,28 +5033,25 @@ def __call__(self, be of type `bytes`. Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. + ~.eventarc.ListChannelConnectionsResponse: + The response message for the + `ListChannelConnections` method. 
""" - http_options = _BaseEventarcRestTransport._BaseCreateTrigger._get_http_options() - - request, metadata = self._interceptor.pre_create_trigger(request, metadata) - transcoded_request = _BaseEventarcRestTransport._BaseCreateTrigger._get_transcoded_request(http_options, request) + http_options = _BaseEventarcRestTransport._BaseListChannelConnections._get_http_options() - body = _BaseEventarcRestTransport._BaseCreateTrigger._get_request_body_json(transcoded_request) + request, metadata = self._interceptor.pre_list_channel_connections(request, metadata) + transcoded_request = _BaseEventarcRestTransport._BaseListChannelConnections._get_transcoded_request(http_options, request) # Jsonify the query params - query_params = _BaseEventarcRestTransport._BaseCreateTrigger._get_query_params_json(transcoded_request) + query_params = _BaseEventarcRestTransport._BaseListChannelConnections._get_query_params_json(transcoded_request) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] try: - request_payload = json_format.MessageToJson(request) + request_payload = type(request).to_json(request) except: request_payload = None http_request = { @@ -1544,17 +5061,17 @@ def __call__(self, "headers": dict(metadata), } _LOGGER.debug( - f"Sending request for google.cloud.eventarc_v1.EventarcClient.CreateTrigger", + f"Sending request for google.cloud.eventarc_v1.EventarcClient.ListChannelConnections", extra = { "serviceName": "google.cloud.eventarc.v1.Eventarc", - "rpcName": "CreateTrigger", + "rpcName": "ListChannelConnections", "httpRequest": http_request, "metadata": http_request["headers"], }, ) # Send the request - response = EventarcRestTransport._CreateTrigger._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + response = EventarcRestTransport._ListChannelConnections._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
@@ -1562,15 +5079,17 @@ def __call__(self, raise core_exceptions.from_http_response(response) # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = eventarc.ListChannelConnectionsResponse() + pb_resp = eventarc.ListChannelConnectionsResponse.pb(resp) - resp = self._interceptor.post_create_trigger(resp) + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_list_channel_connections(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_create_trigger_with_metadata(resp, response_metadata) + resp, _ = self._interceptor.post_list_channel_connections_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER try: - response_payload = json_format.MessageToJson(resp) + response_payload = eventarc.ListChannelConnectionsResponse.to_json(response) except: response_payload = None http_response = { @@ -1579,19 +5098,19 @@ def __call__(self, "status": response.status_code, } _LOGGER.debug( - "Received response for google.cloud.eventarc_v1.EventarcClient.create_trigger", + "Received response for google.cloud.eventarc_v1.EventarcClient.list_channel_connections", extra = { "serviceName": "google.cloud.eventarc.v1.Eventarc", - "rpcName": "CreateTrigger", + "rpcName": "ListChannelConnections", "metadata": http_response["headers"], "httpResponse": http_response, }, ) return resp - class _DeleteChannel(_BaseEventarcRestTransport._BaseDeleteChannel, EventarcRestStub): + class _ListChannels(_BaseEventarcRestTransport._BaseListChannels, EventarcRestStub): def __hash__(self): - return hash("EventarcRestTransport.DeleteChannel") + return hash("EventarcRestTransport.ListChannels") @staticmethod def _get_response( @@ -1616,17 +5135,17 @@ def _get_response( return response def __call__(self, - request: eventarc.DeleteChannelRequest, *, + request: eventarc.ListChannelsRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: Optional[float]=None, metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: - r"""Call the delete channel method over HTTP. + ) -> eventarc.ListChannelsResponse: + r"""Call the list channels method over HTTP. Args: - request (~.eventarc.DeleteChannelRequest): + request (~.eventarc.ListChannelsRequest): The request object. The request message for the - DeleteChannel method. + ListChannels method. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1636,26 +5155,25 @@ def __call__(self, be of type `bytes`. Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. + ~.eventarc.ListChannelsResponse: + The response message for the + `ListChannels` method. 
""" - http_options = _BaseEventarcRestTransport._BaseDeleteChannel._get_http_options() + http_options = _BaseEventarcRestTransport._BaseListChannels._get_http_options() - request, metadata = self._interceptor.pre_delete_channel(request, metadata) - transcoded_request = _BaseEventarcRestTransport._BaseDeleteChannel._get_transcoded_request(http_options, request) + request, metadata = self._interceptor.pre_list_channels(request, metadata) + transcoded_request = _BaseEventarcRestTransport._BaseListChannels._get_transcoded_request(http_options, request) # Jsonify the query params - query_params = _BaseEventarcRestTransport._BaseDeleteChannel._get_query_params_json(transcoded_request) + query_params = _BaseEventarcRestTransport._BaseListChannels._get_query_params_json(transcoded_request) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] try: - request_payload = json_format.MessageToJson(request) + request_payload = type(request).to_json(request) except: request_payload = None http_request = { @@ -1665,17 +5183,17 @@ def __call__(self, "headers": dict(metadata), } _LOGGER.debug( - f"Sending request for google.cloud.eventarc_v1.EventarcClient.DeleteChannel", + f"Sending request for google.cloud.eventarc_v1.EventarcClient.ListChannels", extra = { "serviceName": "google.cloud.eventarc.v1.Eventarc", - "rpcName": "DeleteChannel", + "rpcName": "ListChannels", "httpRequest": http_request, "metadata": http_request["headers"], }, ) # Send the request - response = EventarcRestTransport._DeleteChannel._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = EventarcRestTransport._ListChannels._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
@@ -1683,15 +5201,17 @@ def __call__(self, raise core_exceptions.from_http_response(response) # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = eventarc.ListChannelsResponse() + pb_resp = eventarc.ListChannelsResponse.pb(resp) - resp = self._interceptor.post_delete_channel(resp) + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_list_channels(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_delete_channel_with_metadata(resp, response_metadata) + resp, _ = self._interceptor.post_list_channels_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER try: - response_payload = json_format.MessageToJson(resp) + response_payload = eventarc.ListChannelsResponse.to_json(response) except: response_payload = None http_response = { @@ -1700,19 +5220,19 @@ def __call__(self, "status": response.status_code, } _LOGGER.debug( - "Received response for google.cloud.eventarc_v1.EventarcClient.delete_channel", + "Received response for google.cloud.eventarc_v1.EventarcClient.list_channels", extra = { "serviceName": "google.cloud.eventarc.v1.Eventarc", - "rpcName": "DeleteChannel", + "rpcName": "ListChannels", "metadata": http_response["headers"], "httpResponse": http_response, }, ) return resp - class _DeleteChannelConnection(_BaseEventarcRestTransport._BaseDeleteChannelConnection, EventarcRestStub): + class _ListEnrollments(_BaseEventarcRestTransport._BaseListEnrollments, EventarcRestStub): def __hash__(self): - return hash("EventarcRestTransport.DeleteChannelConnection") + return hash("EventarcRestTransport.ListEnrollments") @staticmethod def _get_response( @@ -1737,17 +5257,17 @@ def _get_response( return response def __call__(self, - request: eventarc.DeleteChannelConnectionRequest, *, + request: eventarc.ListEnrollmentsRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: Optional[float]=None, metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: - r"""Call the delete channel connection method over HTTP. + ) -> eventarc.ListEnrollmentsResponse: + r"""Call the list enrollments method over HTTP. Args: - request (~.eventarc.DeleteChannelConnectionRequest): + request (~.eventarc.ListEnrollmentsRequest): The request object. The request message for the - DeleteChannelConnection method. + ListEnrollments method. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1757,26 +5277,25 @@ def __call__(self, be of type `bytes`. Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. + ~.eventarc.ListEnrollmentsResponse: + The response message for the + `ListEnrollments` method. 
""" - http_options = _BaseEventarcRestTransport._BaseDeleteChannelConnection._get_http_options() + http_options = _BaseEventarcRestTransport._BaseListEnrollments._get_http_options() - request, metadata = self._interceptor.pre_delete_channel_connection(request, metadata) - transcoded_request = _BaseEventarcRestTransport._BaseDeleteChannelConnection._get_transcoded_request(http_options, request) + request, metadata = self._interceptor.pre_list_enrollments(request, metadata) + transcoded_request = _BaseEventarcRestTransport._BaseListEnrollments._get_transcoded_request(http_options, request) # Jsonify the query params - query_params = _BaseEventarcRestTransport._BaseDeleteChannelConnection._get_query_params_json(transcoded_request) + query_params = _BaseEventarcRestTransport._BaseListEnrollments._get_query_params_json(transcoded_request) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] try: - request_payload = json_format.MessageToJson(request) + request_payload = type(request).to_json(request) except: request_payload = None http_request = { @@ -1786,17 +5305,17 @@ def __call__(self, "headers": dict(metadata), } _LOGGER.debug( - f"Sending request for google.cloud.eventarc_v1.EventarcClient.DeleteChannelConnection", + f"Sending request for google.cloud.eventarc_v1.EventarcClient.ListEnrollments", extra = { "serviceName": "google.cloud.eventarc.v1.Eventarc", - "rpcName": "DeleteChannelConnection", + "rpcName": "ListEnrollments", "httpRequest": http_request, "metadata": http_request["headers"], }, ) # Send the request - response = EventarcRestTransport._DeleteChannelConnection._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = EventarcRestTransport._ListEnrollments._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
@@ -1804,15 +5323,17 @@ def __call__(self, raise core_exceptions.from_http_response(response) # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = eventarc.ListEnrollmentsResponse() + pb_resp = eventarc.ListEnrollmentsResponse.pb(resp) - resp = self._interceptor.post_delete_channel_connection(resp) + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_list_enrollments(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_delete_channel_connection_with_metadata(resp, response_metadata) + resp, _ = self._interceptor.post_list_enrollments_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER try: - response_payload = json_format.MessageToJson(resp) + response_payload = eventarc.ListEnrollmentsResponse.to_json(response) except: response_payload = None http_response = { @@ -1821,19 +5342,19 @@ def __call__(self, "status": response.status_code, } _LOGGER.debug( - "Received response for google.cloud.eventarc_v1.EventarcClient.delete_channel_connection", + "Received response for google.cloud.eventarc_v1.EventarcClient.list_enrollments", extra = { "serviceName": "google.cloud.eventarc.v1.Eventarc", - "rpcName": "DeleteChannelConnection", + "rpcName": "ListEnrollments", "metadata": http_response["headers"], "httpResponse": http_response, }, ) return resp - class _DeleteTrigger(_BaseEventarcRestTransport._BaseDeleteTrigger, EventarcRestStub): + class _ListGoogleApiSources(_BaseEventarcRestTransport._BaseListGoogleApiSources, EventarcRestStub): def __hash__(self): - return hash("EventarcRestTransport.DeleteTrigger") + return hash("EventarcRestTransport.ListGoogleApiSources") @staticmethod def _get_response( @@ -1858,17 +5379,17 @@ def _get_response( return response def __call__(self, - request: eventarc.DeleteTriggerRequest, *, + request: eventarc.ListGoogleApiSourcesRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: Optional[float]=None, metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: - r"""Call the delete trigger method over HTTP. + ) -> eventarc.ListGoogleApiSourcesResponse: + r"""Call the list google api sources method over HTTP. Args: - request (~.eventarc.DeleteTriggerRequest): + request (~.eventarc.ListGoogleApiSourcesRequest): The request object. The request message for the - DeleteTrigger method. + ListGoogleApiSources method. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1878,26 +5399,25 @@ def __call__(self, be of type `bytes`. Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. + ~.eventarc.ListGoogleApiSourcesResponse: + The response message for the + `ListGoogleApiSources` method. 
""" - http_options = _BaseEventarcRestTransport._BaseDeleteTrigger._get_http_options() + http_options = _BaseEventarcRestTransport._BaseListGoogleApiSources._get_http_options() - request, metadata = self._interceptor.pre_delete_trigger(request, metadata) - transcoded_request = _BaseEventarcRestTransport._BaseDeleteTrigger._get_transcoded_request(http_options, request) + request, metadata = self._interceptor.pre_list_google_api_sources(request, metadata) + transcoded_request = _BaseEventarcRestTransport._BaseListGoogleApiSources._get_transcoded_request(http_options, request) # Jsonify the query params - query_params = _BaseEventarcRestTransport._BaseDeleteTrigger._get_query_params_json(transcoded_request) + query_params = _BaseEventarcRestTransport._BaseListGoogleApiSources._get_query_params_json(transcoded_request) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] try: - request_payload = json_format.MessageToJson(request) + request_payload = type(request).to_json(request) except: request_payload = None http_request = { @@ -1907,17 +5427,17 @@ def __call__(self, "headers": dict(metadata), } _LOGGER.debug( - f"Sending request for google.cloud.eventarc_v1.EventarcClient.DeleteTrigger", + f"Sending request for google.cloud.eventarc_v1.EventarcClient.ListGoogleApiSources", extra = { "serviceName": "google.cloud.eventarc.v1.Eventarc", - "rpcName": "DeleteTrigger", + "rpcName": "ListGoogleApiSources", "httpRequest": http_request, "metadata": http_request["headers"], }, ) # Send the request - response = EventarcRestTransport._DeleteTrigger._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = EventarcRestTransport._ListGoogleApiSources._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
@@ -1925,15 +5445,17 @@ def __call__(self, raise core_exceptions.from_http_response(response) # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = eventarc.ListGoogleApiSourcesResponse() + pb_resp = eventarc.ListGoogleApiSourcesResponse.pb(resp) - resp = self._interceptor.post_delete_trigger(resp) + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_list_google_api_sources(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_delete_trigger_with_metadata(resp, response_metadata) + resp, _ = self._interceptor.post_list_google_api_sources_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER try: - response_payload = json_format.MessageToJson(resp) + response_payload = eventarc.ListGoogleApiSourcesResponse.to_json(response) except: response_payload = None http_response = { @@ -1942,19 +5464,19 @@ def __call__(self, "status": response.status_code, } _LOGGER.debug( - "Received response for google.cloud.eventarc_v1.EventarcClient.delete_trigger", + "Received response for google.cloud.eventarc_v1.EventarcClient.list_google_api_sources", extra = { "serviceName": "google.cloud.eventarc.v1.Eventarc", - "rpcName": "DeleteTrigger", + "rpcName": "ListGoogleApiSources", "metadata": http_response["headers"], "httpResponse": http_response, }, ) return resp - class _GetChannel(_BaseEventarcRestTransport._BaseGetChannel, EventarcRestStub): + class _ListMessageBusEnrollments(_BaseEventarcRestTransport._BaseListMessageBusEnrollments, EventarcRestStub): def __hash__(self): - return hash("EventarcRestTransport.GetChannel") + return hash("EventarcRestTransport.ListMessageBusEnrollments") @staticmethod def _get_response( @@ -1979,17 +5501,18 @@ def _get_response( return response def __call__(self, - request: eventarc.GetChannelRequest, *, + request: eventarc.ListMessageBusEnrollmentsRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: Optional[float]=None, metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> channel.Channel: - r"""Call the get channel method over HTTP. + ) -> eventarc.ListMessageBusEnrollmentsResponse: + r"""Call the list message bus + enrollments method over HTTP. Args: - request (~.eventarc.GetChannelRequest): + request (~.eventarc.ListMessageBusEnrollmentsRequest): The request object. The request message for the - GetChannel method. + `ListMessageBusEnrollments` method. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1999,25 +5522,19 @@ def __call__(self, be of type `bytes`. Returns: - ~.channel.Channel: - A representation of the Channel - resource. A Channel is a resource on - which event providers publish their - events. The published events are - delivered through the transport - associated with the channel. Note that a - channel is associated with exactly one - event provider. 
+ ~.eventarc.ListMessageBusEnrollmentsResponse: + The response message for the + `ListMessageBusEnrollments` method.` """ - http_options = _BaseEventarcRestTransport._BaseGetChannel._get_http_options() + http_options = _BaseEventarcRestTransport._BaseListMessageBusEnrollments._get_http_options() - request, metadata = self._interceptor.pre_get_channel(request, metadata) - transcoded_request = _BaseEventarcRestTransport._BaseGetChannel._get_transcoded_request(http_options, request) + request, metadata = self._interceptor.pre_list_message_bus_enrollments(request, metadata) + transcoded_request = _BaseEventarcRestTransport._BaseListMessageBusEnrollments._get_transcoded_request(http_options, request) # Jsonify the query params - query_params = _BaseEventarcRestTransport._BaseGetChannel._get_query_params_json(transcoded_request) + query_params = _BaseEventarcRestTransport._BaseListMessageBusEnrollments._get_query_params_json(transcoded_request) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) @@ -2033,17 +5550,17 @@ def __call__(self, "headers": dict(metadata), } _LOGGER.debug( - f"Sending request for google.cloud.eventarc_v1.EventarcClient.GetChannel", + f"Sending request for google.cloud.eventarc_v1.EventarcClient.ListMessageBusEnrollments", extra = { "serviceName": "google.cloud.eventarc.v1.Eventarc", - "rpcName": "GetChannel", + "rpcName": "ListMessageBusEnrollments", "httpRequest": http_request, "metadata": http_request["headers"], }, ) # Send the request - response = EventarcRestTransport._GetChannel._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = EventarcRestTransport._ListMessageBusEnrollments._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
@@ -2051,17 +5568,17 @@ def __call__(self, raise core_exceptions.from_http_response(response) # Return the response - resp = channel.Channel() - pb_resp = channel.Channel.pb(resp) + resp = eventarc.ListMessageBusEnrollmentsResponse() + pb_resp = eventarc.ListMessageBusEnrollmentsResponse.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_get_channel(resp) + resp = self._interceptor.post_list_message_bus_enrollments(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_get_channel_with_metadata(resp, response_metadata) + resp, _ = self._interceptor.post_list_message_bus_enrollments_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER try: - response_payload = channel.Channel.to_json(response) + response_payload = eventarc.ListMessageBusEnrollmentsResponse.to_json(response) except: response_payload = None http_response = { @@ -2070,19 +5587,19 @@ def __call__(self, "status": response.status_code, } _LOGGER.debug( - "Received response for google.cloud.eventarc_v1.EventarcClient.get_channel", + "Received response for google.cloud.eventarc_v1.EventarcClient.list_message_bus_enrollments", extra = { "serviceName": "google.cloud.eventarc.v1.Eventarc", - "rpcName": "GetChannel", + "rpcName": "ListMessageBusEnrollments", "metadata": http_response["headers"], "httpResponse": http_response, }, ) return resp - class _GetChannelConnection(_BaseEventarcRestTransport._BaseGetChannelConnection, EventarcRestStub): + class _ListMessageBuses(_BaseEventarcRestTransport._BaseListMessageBuses, EventarcRestStub): def __hash__(self): - return hash("EventarcRestTransport.GetChannelConnection") + return hash("EventarcRestTransport.ListMessageBuses") @staticmethod def _get_response( @@ -2107,17 +5624,17 @@ def _get_response( return response def __call__(self, - request: eventarc.GetChannelConnectionRequest, *, + request: eventarc.ListMessageBusesRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: Optional[float]=None, metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> channel_connection.ChannelConnection: - r"""Call the get channel connection method over HTTP. + ) -> eventarc.ListMessageBusesResponse: + r"""Call the list message buses method over HTTP. Args: - request (~.eventarc.GetChannelConnectionRequest): + request (~.eventarc.ListMessageBusesRequest): The request object. The request message for the - GetChannelConnection method. + ListMessageBuses method. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -2127,24 +5644,19 @@ def __call__(self, be of type `bytes`. Returns: - ~.channel_connection.ChannelConnection: - A representation of the - ChannelConnection resource. A - ChannelConnection is a resource which - event providers create during the - activation process to establish a - connection between the provider and the - subscriber channel. + ~.eventarc.ListMessageBusesResponse: + The response message for the + `ListMessageBuses` method. 
""" - http_options = _BaseEventarcRestTransport._BaseGetChannelConnection._get_http_options() + http_options = _BaseEventarcRestTransport._BaseListMessageBuses._get_http_options() - request, metadata = self._interceptor.pre_get_channel_connection(request, metadata) - transcoded_request = _BaseEventarcRestTransport._BaseGetChannelConnection._get_transcoded_request(http_options, request) + request, metadata = self._interceptor.pre_list_message_buses(request, metadata) + transcoded_request = _BaseEventarcRestTransport._BaseListMessageBuses._get_transcoded_request(http_options, request) # Jsonify the query params - query_params = _BaseEventarcRestTransport._BaseGetChannelConnection._get_query_params_json(transcoded_request) + query_params = _BaseEventarcRestTransport._BaseListMessageBuses._get_query_params_json(transcoded_request) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) @@ -2160,17 +5672,17 @@ def __call__(self, "headers": dict(metadata), } _LOGGER.debug( - f"Sending request for google.cloud.eventarc_v1.EventarcClient.GetChannelConnection", + f"Sending request for google.cloud.eventarc_v1.EventarcClient.ListMessageBuses", extra = { "serviceName": "google.cloud.eventarc.v1.Eventarc", - "rpcName": "GetChannelConnection", + "rpcName": "ListMessageBuses", "httpRequest": http_request, "metadata": http_request["headers"], }, ) # Send the request - response = EventarcRestTransport._GetChannelConnection._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = EventarcRestTransport._ListMessageBuses._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
@@ -2178,17 +5690,17 @@ def __call__(self, raise core_exceptions.from_http_response(response) # Return the response - resp = channel_connection.ChannelConnection() - pb_resp = channel_connection.ChannelConnection.pb(resp) + resp = eventarc.ListMessageBusesResponse() + pb_resp = eventarc.ListMessageBusesResponse.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_get_channel_connection(resp) + resp = self._interceptor.post_list_message_buses(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_get_channel_connection_with_metadata(resp, response_metadata) + resp, _ = self._interceptor.post_list_message_buses_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER try: - response_payload = channel_connection.ChannelConnection.to_json(response) + response_payload = eventarc.ListMessageBusesResponse.to_json(response) except: response_payload = None http_response = { @@ -2197,19 +5709,19 @@ def __call__(self, "status": response.status_code, } _LOGGER.debug( - "Received response for google.cloud.eventarc_v1.EventarcClient.get_channel_connection", + "Received response for google.cloud.eventarc_v1.EventarcClient.list_message_buses", extra = { "serviceName": "google.cloud.eventarc.v1.Eventarc", - "rpcName": "GetChannelConnection", + "rpcName": "ListMessageBuses", "metadata": http_response["headers"], "httpResponse": http_response, }, ) return resp - class _GetGoogleChannelConfig(_BaseEventarcRestTransport._BaseGetGoogleChannelConfig, EventarcRestStub): + class _ListPipelines(_BaseEventarcRestTransport._BaseListPipelines, EventarcRestStub): def __hash__(self): - return hash("EventarcRestTransport.GetGoogleChannelConfig") + return hash("EventarcRestTransport.ListPipelines") @staticmethod def _get_response( @@ -2234,17 +5746,17 @@ def _get_response( return response def __call__(self, - request: eventarc.GetGoogleChannelConfigRequest, *, + request: eventarc.ListPipelinesRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: Optional[float]=None, metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> google_channel_config.GoogleChannelConfig: - r"""Call the get google channel config method over HTTP. + ) -> eventarc.ListPipelinesResponse: + r"""Call the list pipelines method over HTTP. Args: - request (~.eventarc.GetGoogleChannelConfigRequest): + request (~.eventarc.ListPipelinesRequest): The request object. The request message for the - GetGoogleChannelConfig method. + ListPipelines method. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -2254,25 +5766,19 @@ def __call__(self, be of type `bytes`. Returns: - ~.google_channel_config.GoogleChannelConfig: - A GoogleChannelConfig is a resource - that stores the custom settings - respected by Eventarc first-party - triggers in the matching region. Once - configured, first-party event data will - be protected using the specified custom - managed encryption key instead of - Google-managed encryption keys. + ~.eventarc.ListPipelinesResponse: + The response message for the + ListPipelines method. 
""" - http_options = _BaseEventarcRestTransport._BaseGetGoogleChannelConfig._get_http_options() + http_options = _BaseEventarcRestTransport._BaseListPipelines._get_http_options() - request, metadata = self._interceptor.pre_get_google_channel_config(request, metadata) - transcoded_request = _BaseEventarcRestTransport._BaseGetGoogleChannelConfig._get_transcoded_request(http_options, request) + request, metadata = self._interceptor.pre_list_pipelines(request, metadata) + transcoded_request = _BaseEventarcRestTransport._BaseListPipelines._get_transcoded_request(http_options, request) # Jsonify the query params - query_params = _BaseEventarcRestTransport._BaseGetGoogleChannelConfig._get_query_params_json(transcoded_request) + query_params = _BaseEventarcRestTransport._BaseListPipelines._get_query_params_json(transcoded_request) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) @@ -2288,17 +5794,17 @@ def __call__(self, "headers": dict(metadata), } _LOGGER.debug( - f"Sending request for google.cloud.eventarc_v1.EventarcClient.GetGoogleChannelConfig", + f"Sending request for google.cloud.eventarc_v1.EventarcClient.ListPipelines", extra = { "serviceName": "google.cloud.eventarc.v1.Eventarc", - "rpcName": "GetGoogleChannelConfig", + "rpcName": "ListPipelines", "httpRequest": http_request, "metadata": http_request["headers"], }, ) # Send the request - response = EventarcRestTransport._GetGoogleChannelConfig._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = EventarcRestTransport._ListPipelines._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
@@ -2306,17 +5812,17 @@ def __call__(self, raise core_exceptions.from_http_response(response) # Return the response - resp = google_channel_config.GoogleChannelConfig() - pb_resp = google_channel_config.GoogleChannelConfig.pb(resp) + resp = eventarc.ListPipelinesResponse() + pb_resp = eventarc.ListPipelinesResponse.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_get_google_channel_config(resp) + resp = self._interceptor.post_list_pipelines(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_get_google_channel_config_with_metadata(resp, response_metadata) + resp, _ = self._interceptor.post_list_pipelines_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER try: - response_payload = google_channel_config.GoogleChannelConfig.to_json(response) + response_payload = eventarc.ListPipelinesResponse.to_json(response) except: response_payload = None http_response = { @@ -2325,19 +5831,19 @@ def __call__(self, "status": response.status_code, } _LOGGER.debug( - "Received response for google.cloud.eventarc_v1.EventarcClient.get_google_channel_config", + "Received response for google.cloud.eventarc_v1.EventarcClient.list_pipelines", extra = { "serviceName": "google.cloud.eventarc.v1.Eventarc", - "rpcName": "GetGoogleChannelConfig", + "rpcName": "ListPipelines", "metadata": http_response["headers"], "httpResponse": http_response, }, ) return resp - class _GetProvider(_BaseEventarcRestTransport._BaseGetProvider, EventarcRestStub): + class _ListProviders(_BaseEventarcRestTransport._BaseListProviders, EventarcRestStub): def __hash__(self): - return hash("EventarcRestTransport.GetProvider") + return hash("EventarcRestTransport.ListProviders") @staticmethod def _get_response( @@ -2362,17 +5868,17 @@ def _get_response( return response def __call__(self, - request: eventarc.GetProviderRequest, *, + request: eventarc.ListProvidersRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: Optional[float]=None, metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> discovery.Provider: - r"""Call the get provider method over HTTP. + ) -> eventarc.ListProvidersResponse: + r"""Call the list providers method over HTTP. Args: - request (~.eventarc.GetProviderRequest): + request (~.eventarc.ListProvidersRequest): The request object. The request message for the - GetProvider method. + ListProviders method. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -2382,19 +5888,19 @@ def __call__(self, be of type `bytes`. Returns: - ~.discovery.Provider: - A representation of the Provider - resource. + ~.eventarc.ListProvidersResponse: + The response message for the + `ListProviders` method. 
""" - http_options = _BaseEventarcRestTransport._BaseGetProvider._get_http_options() + http_options = _BaseEventarcRestTransport._BaseListProviders._get_http_options() - request, metadata = self._interceptor.pre_get_provider(request, metadata) - transcoded_request = _BaseEventarcRestTransport._BaseGetProvider._get_transcoded_request(http_options, request) + request, metadata = self._interceptor.pre_list_providers(request, metadata) + transcoded_request = _BaseEventarcRestTransport._BaseListProviders._get_transcoded_request(http_options, request) # Jsonify the query params - query_params = _BaseEventarcRestTransport._BaseGetProvider._get_query_params_json(transcoded_request) + query_params = _BaseEventarcRestTransport._BaseListProviders._get_query_params_json(transcoded_request) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) @@ -2410,17 +5916,17 @@ def __call__(self, "headers": dict(metadata), } _LOGGER.debug( - f"Sending request for google.cloud.eventarc_v1.EventarcClient.GetProvider", + f"Sending request for google.cloud.eventarc_v1.EventarcClient.ListProviders", extra = { "serviceName": "google.cloud.eventarc.v1.Eventarc", - "rpcName": "GetProvider", + "rpcName": "ListProviders", "httpRequest": http_request, "metadata": http_request["headers"], }, ) # Send the request - response = EventarcRestTransport._GetProvider._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = EventarcRestTransport._ListProviders._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
@@ -2428,17 +5934,17 @@ def __call__(self, raise core_exceptions.from_http_response(response) # Return the response - resp = discovery.Provider() - pb_resp = discovery.Provider.pb(resp) + resp = eventarc.ListProvidersResponse() + pb_resp = eventarc.ListProvidersResponse.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_get_provider(resp) + resp = self._interceptor.post_list_providers(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_get_provider_with_metadata(resp, response_metadata) + resp, _ = self._interceptor.post_list_providers_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER try: - response_payload = discovery.Provider.to_json(response) + response_payload = eventarc.ListProvidersResponse.to_json(response) except: response_payload = None http_response = { @@ -2447,19 +5953,19 @@ def __call__(self, "status": response.status_code, } _LOGGER.debug( - "Received response for google.cloud.eventarc_v1.EventarcClient.get_provider", + "Received response for google.cloud.eventarc_v1.EventarcClient.list_providers", extra = { "serviceName": "google.cloud.eventarc.v1.Eventarc", - "rpcName": "GetProvider", + "rpcName": "ListProviders", "metadata": http_response["headers"], "httpResponse": http_response, }, ) return resp - class _GetTrigger(_BaseEventarcRestTransport._BaseGetTrigger, EventarcRestStub): + class _ListTriggers(_BaseEventarcRestTransport._BaseListTriggers, EventarcRestStub): def __hash__(self): - return hash("EventarcRestTransport.GetTrigger") + return hash("EventarcRestTransport.ListTriggers") @staticmethod def _get_response( @@ -2484,17 +5990,17 @@ def _get_response( return response def __call__(self, - request: eventarc.GetTriggerRequest, *, + request: eventarc.ListTriggersRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: Optional[float]=None, metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> trigger.Trigger: - r"""Call the get trigger method over HTTP. + ) -> eventarc.ListTriggersResponse: + r"""Call the list triggers method over HTTP. Args: - request (~.eventarc.GetTriggerRequest): + request (~.eventarc.ListTriggersRequest): The request object. The request message for the - GetTrigger method. + ListTriggers method. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -2504,19 +6010,19 @@ def __call__(self, be of type `bytes`. Returns: - ~.trigger.Trigger: - A representation of the trigger - resource. + ~.eventarc.ListTriggersResponse: + The response message for the + `ListTriggers` method. 
""" - http_options = _BaseEventarcRestTransport._BaseGetTrigger._get_http_options() + http_options = _BaseEventarcRestTransport._BaseListTriggers._get_http_options() - request, metadata = self._interceptor.pre_get_trigger(request, metadata) - transcoded_request = _BaseEventarcRestTransport._BaseGetTrigger._get_transcoded_request(http_options, request) + request, metadata = self._interceptor.pre_list_triggers(request, metadata) + transcoded_request = _BaseEventarcRestTransport._BaseListTriggers._get_transcoded_request(http_options, request) # Jsonify the query params - query_params = _BaseEventarcRestTransport._BaseGetTrigger._get_query_params_json(transcoded_request) + query_params = _BaseEventarcRestTransport._BaseListTriggers._get_query_params_json(transcoded_request) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) @@ -2532,17 +6038,17 @@ def __call__(self, "headers": dict(metadata), } _LOGGER.debug( - f"Sending request for google.cloud.eventarc_v1.EventarcClient.GetTrigger", + f"Sending request for google.cloud.eventarc_v1.EventarcClient.ListTriggers", extra = { "serviceName": "google.cloud.eventarc.v1.Eventarc", - "rpcName": "GetTrigger", + "rpcName": "ListTriggers", "httpRequest": http_request, "metadata": http_request["headers"], }, ) # Send the request - response = EventarcRestTransport._GetTrigger._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = EventarcRestTransport._ListTriggers._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
@@ -2550,17 +6056,17 @@ def __call__(self, raise core_exceptions.from_http_response(response) # Return the response - resp = trigger.Trigger() - pb_resp = trigger.Trigger.pb(resp) + resp = eventarc.ListTriggersResponse() + pb_resp = eventarc.ListTriggersResponse.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_get_trigger(resp) + resp = self._interceptor.post_list_triggers(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_get_trigger_with_metadata(resp, response_metadata) + resp, _ = self._interceptor.post_list_triggers_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER try: - response_payload = trigger.Trigger.to_json(response) + response_payload = eventarc.ListTriggersResponse.to_json(response) except: response_payload = None http_response = { @@ -2569,19 +6075,19 @@ def __call__(self, "status": response.status_code, } _LOGGER.debug( - "Received response for google.cloud.eventarc_v1.EventarcClient.get_trigger", + "Received response for google.cloud.eventarc_v1.EventarcClient.list_triggers", extra = { "serviceName": "google.cloud.eventarc.v1.Eventarc", - "rpcName": "GetTrigger", + "rpcName": "ListTriggers", "metadata": http_response["headers"], "httpResponse": http_response, }, ) return resp - class _ListChannelConnections(_BaseEventarcRestTransport._BaseListChannelConnections, EventarcRestStub): + class _UpdateChannel(_BaseEventarcRestTransport._BaseUpdateChannel, EventarcRestStub): def __hash__(self): - return hash("EventarcRestTransport.ListChannelConnections") + return hash("EventarcRestTransport.UpdateChannel") @staticmethod def _get_response( @@ -2602,21 +6108,22 @@ def _get_response( timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, ) return response def __call__(self, - request: eventarc.ListChannelConnectionsRequest, *, + request: eventarc.UpdateChannelRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: Optional[float]=None, metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> eventarc.ListChannelConnectionsResponse: - r"""Call the list channel connections method over HTTP. + ) -> operations_pb2.Operation: + r"""Call the update channel method over HTTP. Args: - request (~.eventarc.ListChannelConnectionsRequest): + request (~.eventarc.UpdateChannelRequest): The request object. The request message for the - ListChannelConnections method. + UpdateChannel method. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -2626,25 +6133,28 @@ def __call__(self, be of type `bytes`. Returns: - ~.eventarc.ListChannelConnectionsResponse: - The response message for the - `ListChannelConnections` method. + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
""" - http_options = _BaseEventarcRestTransport._BaseListChannelConnections._get_http_options() + http_options = _BaseEventarcRestTransport._BaseUpdateChannel._get_http_options() - request, metadata = self._interceptor.pre_list_channel_connections(request, metadata) - transcoded_request = _BaseEventarcRestTransport._BaseListChannelConnections._get_transcoded_request(http_options, request) + request, metadata = self._interceptor.pre_update_channel(request, metadata) + transcoded_request = _BaseEventarcRestTransport._BaseUpdateChannel._get_transcoded_request(http_options, request) + + body = _BaseEventarcRestTransport._BaseUpdateChannel._get_request_body_json(transcoded_request) # Jsonify the query params - query_params = _BaseEventarcRestTransport._BaseListChannelConnections._get_query_params_json(transcoded_request) + query_params = _BaseEventarcRestTransport._BaseUpdateChannel._get_query_params_json(transcoded_request) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] try: - request_payload = type(request).to_json(request) + request_payload = json_format.MessageToJson(request) except: request_payload = None http_request = { @@ -2654,17 +6164,17 @@ def __call__(self, "headers": dict(metadata), } _LOGGER.debug( - f"Sending request for google.cloud.eventarc_v1.EventarcClient.ListChannelConnections", + f"Sending request for google.cloud.eventarc_v1.EventarcClient.UpdateChannel", extra = { "serviceName": "google.cloud.eventarc.v1.Eventarc", - "rpcName": "ListChannelConnections", + "rpcName": "UpdateChannel", "httpRequest": http_request, "metadata": http_request["headers"], }, ) # Send the request - response = EventarcRestTransport._ListChannelConnections._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = EventarcRestTransport._UpdateChannel._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
@@ -2672,17 +6182,15 @@ def __call__(self, raise core_exceptions.from_http_response(response) # Return the response - resp = eventarc.ListChannelConnectionsResponse() - pb_resp = eventarc.ListChannelConnectionsResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) - resp = self._interceptor.post_list_channel_connections(resp) + resp = self._interceptor.post_update_channel(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_list_channel_connections_with_metadata(resp, response_metadata) + resp, _ = self._interceptor.post_update_channel_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER try: - response_payload = eventarc.ListChannelConnectionsResponse.to_json(response) + response_payload = json_format.MessageToJson(resp) except: response_payload = None http_response = { @@ -2691,19 +6199,19 @@ def __call__(self, "status": response.status_code, } _LOGGER.debug( - "Received response for google.cloud.eventarc_v1.EventarcClient.list_channel_connections", + "Received response for google.cloud.eventarc_v1.EventarcClient.update_channel", extra = { "serviceName": "google.cloud.eventarc.v1.Eventarc", - "rpcName": "ListChannelConnections", + "rpcName": "UpdateChannel", "metadata": http_response["headers"], "httpResponse": http_response, }, ) return resp - class _ListChannels(_BaseEventarcRestTransport._BaseListChannels, EventarcRestStub): + class _UpdateEnrollment(_BaseEventarcRestTransport._BaseUpdateEnrollment, EventarcRestStub): def __hash__(self): - return hash("EventarcRestTransport.ListChannels") + return hash("EventarcRestTransport.UpdateEnrollment") @staticmethod def _get_response( @@ -2724,21 +6232,22 @@ def _get_response( timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, ) return response def __call__(self, - request: eventarc.ListChannelsRequest, *, + request: eventarc.UpdateEnrollmentRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: Optional[float]=None, metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> eventarc.ListChannelsResponse: - r"""Call the list channels method over HTTP. + ) -> operations_pb2.Operation: + r"""Call the update enrollment method over HTTP. Args: - request (~.eventarc.ListChannelsRequest): + request (~.eventarc.UpdateEnrollmentRequest): The request object. The request message for the - ListChannels method. + UpdateEnrollment method. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -2748,25 +6257,28 @@ def __call__(self, be of type `bytes`. Returns: - ~.eventarc.ListChannelsResponse: - The response message for the - `ListChannels` method. + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
""" - http_options = _BaseEventarcRestTransport._BaseListChannels._get_http_options() + http_options = _BaseEventarcRestTransport._BaseUpdateEnrollment._get_http_options() - request, metadata = self._interceptor.pre_list_channels(request, metadata) - transcoded_request = _BaseEventarcRestTransport._BaseListChannels._get_transcoded_request(http_options, request) + request, metadata = self._interceptor.pre_update_enrollment(request, metadata) + transcoded_request = _BaseEventarcRestTransport._BaseUpdateEnrollment._get_transcoded_request(http_options, request) + + body = _BaseEventarcRestTransport._BaseUpdateEnrollment._get_request_body_json(transcoded_request) # Jsonify the query params - query_params = _BaseEventarcRestTransport._BaseListChannels._get_query_params_json(transcoded_request) + query_params = _BaseEventarcRestTransport._BaseUpdateEnrollment._get_query_params_json(transcoded_request) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] try: - request_payload = type(request).to_json(request) + request_payload = json_format.MessageToJson(request) except: request_payload = None http_request = { @@ -2776,17 +6288,17 @@ def __call__(self, "headers": dict(metadata), } _LOGGER.debug( - f"Sending request for google.cloud.eventarc_v1.EventarcClient.ListChannels", + f"Sending request for google.cloud.eventarc_v1.EventarcClient.UpdateEnrollment", extra = { "serviceName": "google.cloud.eventarc.v1.Eventarc", - "rpcName": "ListChannels", + "rpcName": "UpdateEnrollment", "httpRequest": http_request, "metadata": http_request["headers"], }, ) # Send the request - response = EventarcRestTransport._ListChannels._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = EventarcRestTransport._UpdateEnrollment._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
@@ -2794,17 +6306,15 @@ def __call__(self, raise core_exceptions.from_http_response(response) # Return the response - resp = eventarc.ListChannelsResponse() - pb_resp = eventarc.ListChannelsResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) - resp = self._interceptor.post_list_channels(resp) + resp = self._interceptor.post_update_enrollment(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_list_channels_with_metadata(resp, response_metadata) + resp, _ = self._interceptor.post_update_enrollment_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER try: - response_payload = eventarc.ListChannelsResponse.to_json(response) + response_payload = json_format.MessageToJson(resp) except: response_payload = None http_response = { @@ -2813,19 +6323,19 @@ def __call__(self, "status": response.status_code, } _LOGGER.debug( - "Received response for google.cloud.eventarc_v1.EventarcClient.list_channels", + "Received response for google.cloud.eventarc_v1.EventarcClient.update_enrollment", extra = { "serviceName": "google.cloud.eventarc.v1.Eventarc", - "rpcName": "ListChannels", + "rpcName": "UpdateEnrollment", "metadata": http_response["headers"], "httpResponse": http_response, }, ) return resp - class _ListProviders(_BaseEventarcRestTransport._BaseListProviders, EventarcRestStub): + class _UpdateGoogleApiSource(_BaseEventarcRestTransport._BaseUpdateGoogleApiSource, EventarcRestStub): def __hash__(self): - return hash("EventarcRestTransport.ListProviders") + return hash("EventarcRestTransport.UpdateGoogleApiSource") @staticmethod def _get_response( @@ -2846,21 +6356,22 @@ def _get_response( timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, ) return response def __call__(self, - request: eventarc.ListProvidersRequest, *, + request: eventarc.UpdateGoogleApiSourceRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: Optional[float]=None, metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> eventarc.ListProvidersResponse: - r"""Call the list providers method over HTTP. + ) -> operations_pb2.Operation: + r"""Call the update google api source method over HTTP. Args: - request (~.eventarc.ListProvidersRequest): + request (~.eventarc.UpdateGoogleApiSourceRequest): The request object. The request message for the - ListProviders method. + UpdateGoogleApiSource method. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -2870,25 +6381,28 @@ def __call__(self, be of type `bytes`. Returns: - ~.eventarc.ListProvidersResponse: - The response message for the - `ListProviders` method. + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
""" - http_options = _BaseEventarcRestTransport._BaseListProviders._get_http_options() + http_options = _BaseEventarcRestTransport._BaseUpdateGoogleApiSource._get_http_options() - request, metadata = self._interceptor.pre_list_providers(request, metadata) - transcoded_request = _BaseEventarcRestTransport._BaseListProviders._get_transcoded_request(http_options, request) + request, metadata = self._interceptor.pre_update_google_api_source(request, metadata) + transcoded_request = _BaseEventarcRestTransport._BaseUpdateGoogleApiSource._get_transcoded_request(http_options, request) + + body = _BaseEventarcRestTransport._BaseUpdateGoogleApiSource._get_request_body_json(transcoded_request) # Jsonify the query params - query_params = _BaseEventarcRestTransport._BaseListProviders._get_query_params_json(transcoded_request) + query_params = _BaseEventarcRestTransport._BaseUpdateGoogleApiSource._get_query_params_json(transcoded_request) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] try: - request_payload = type(request).to_json(request) + request_payload = json_format.MessageToJson(request) except: request_payload = None http_request = { @@ -2898,17 +6412,17 @@ def __call__(self, "headers": dict(metadata), } _LOGGER.debug( - f"Sending request for google.cloud.eventarc_v1.EventarcClient.ListProviders", + f"Sending request for google.cloud.eventarc_v1.EventarcClient.UpdateGoogleApiSource", extra = { "serviceName": "google.cloud.eventarc.v1.Eventarc", - "rpcName": "ListProviders", + "rpcName": "UpdateGoogleApiSource", "httpRequest": http_request, "metadata": http_request["headers"], }, ) # Send the request - response = EventarcRestTransport._ListProviders._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = EventarcRestTransport._UpdateGoogleApiSource._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
@@ -2916,17 +6430,15 @@ def __call__(self, raise core_exceptions.from_http_response(response) # Return the response - resp = eventarc.ListProvidersResponse() - pb_resp = eventarc.ListProvidersResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) - resp = self._interceptor.post_list_providers(resp) + resp = self._interceptor.post_update_google_api_source(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_list_providers_with_metadata(resp, response_metadata) + resp, _ = self._interceptor.post_update_google_api_source_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER try: - response_payload = eventarc.ListProvidersResponse.to_json(response) + response_payload = json_format.MessageToJson(resp) except: response_payload = None http_response = { @@ -2935,19 +6447,19 @@ def __call__(self, "status": response.status_code, } _LOGGER.debug( - "Received response for google.cloud.eventarc_v1.EventarcClient.list_providers", + "Received response for google.cloud.eventarc_v1.EventarcClient.update_google_api_source", extra = { "serviceName": "google.cloud.eventarc.v1.Eventarc", - "rpcName": "ListProviders", + "rpcName": "UpdateGoogleApiSource", "metadata": http_response["headers"], "httpResponse": http_response, }, ) return resp - class _ListTriggers(_BaseEventarcRestTransport._BaseListTriggers, EventarcRestStub): + class _UpdateGoogleChannelConfig(_BaseEventarcRestTransport._BaseUpdateGoogleChannelConfig, EventarcRestStub): def __hash__(self): - return hash("EventarcRestTransport.ListTriggers") + return hash("EventarcRestTransport.UpdateGoogleChannelConfig") @staticmethod def _get_response( @@ -2968,21 +6480,23 @@ def _get_response( timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, ) return response def __call__(self, - request: eventarc.ListTriggersRequest, *, + request: eventarc.UpdateGoogleChannelConfigRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: Optional[float]=None, metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> eventarc.ListTriggersResponse: - r"""Call the list triggers method over HTTP. + ) -> gce_google_channel_config.GoogleChannelConfig: + r"""Call the update google channel + config method over HTTP. Args: - request (~.eventarc.ListTriggersRequest): + request (~.eventarc.UpdateGoogleChannelConfigRequest): The request object. The request message for the - ListTriggers method. + UpdateGoogleChannelConfig method. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -2992,19 +6506,27 @@ def __call__(self, be of type `bytes`. Returns: - ~.eventarc.ListTriggersResponse: - The response message for the - `ListTriggers` method. + ~.gce_google_channel_config.GoogleChannelConfig: + A GoogleChannelConfig is a resource + that stores the custom settings + respected by Eventarc first-party + triggers in the matching region. Once + configured, first-party event data will + be protected using the specified custom + managed encryption key instead of + Google-managed encryption keys. 
""" - http_options = _BaseEventarcRestTransport._BaseListTriggers._get_http_options() + http_options = _BaseEventarcRestTransport._BaseUpdateGoogleChannelConfig._get_http_options() - request, metadata = self._interceptor.pre_list_triggers(request, metadata) - transcoded_request = _BaseEventarcRestTransport._BaseListTriggers._get_transcoded_request(http_options, request) + request, metadata = self._interceptor.pre_update_google_channel_config(request, metadata) + transcoded_request = _BaseEventarcRestTransport._BaseUpdateGoogleChannelConfig._get_transcoded_request(http_options, request) + + body = _BaseEventarcRestTransport._BaseUpdateGoogleChannelConfig._get_request_body_json(transcoded_request) # Jsonify the query params - query_params = _BaseEventarcRestTransport._BaseListTriggers._get_query_params_json(transcoded_request) + query_params = _BaseEventarcRestTransport._BaseUpdateGoogleChannelConfig._get_query_params_json(transcoded_request) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) @@ -3020,17 +6542,17 @@ def __call__(self, "headers": dict(metadata), } _LOGGER.debug( - f"Sending request for google.cloud.eventarc_v1.EventarcClient.ListTriggers", + f"Sending request for google.cloud.eventarc_v1.EventarcClient.UpdateGoogleChannelConfig", extra = { "serviceName": "google.cloud.eventarc.v1.Eventarc", - "rpcName": "ListTriggers", + "rpcName": "UpdateGoogleChannelConfig", "httpRequest": http_request, "metadata": http_request["headers"], }, ) # Send the request - response = EventarcRestTransport._ListTriggers._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = EventarcRestTransport._UpdateGoogleChannelConfig._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
@@ -3038,17 +6560,17 @@ def __call__(self, raise core_exceptions.from_http_response(response) # Return the response - resp = eventarc.ListTriggersResponse() - pb_resp = eventarc.ListTriggersResponse.pb(resp) + resp = gce_google_channel_config.GoogleChannelConfig() + pb_resp = gce_google_channel_config.GoogleChannelConfig.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_list_triggers(resp) + resp = self._interceptor.post_update_google_channel_config(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_list_triggers_with_metadata(resp, response_metadata) + resp, _ = self._interceptor.post_update_google_channel_config_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER try: - response_payload = eventarc.ListTriggersResponse.to_json(response) + response_payload = gce_google_channel_config.GoogleChannelConfig.to_json(response) except: response_payload = None http_response = { @@ -3057,19 +6579,19 @@ def __call__(self, "status": response.status_code, } _LOGGER.debug( - "Received response for google.cloud.eventarc_v1.EventarcClient.list_triggers", + "Received response for google.cloud.eventarc_v1.EventarcClient.update_google_channel_config", extra = { "serviceName": "google.cloud.eventarc.v1.Eventarc", - "rpcName": "ListTriggers", + "rpcName": "UpdateGoogleChannelConfig", "metadata": http_response["headers"], "httpResponse": http_response, }, ) return resp - class _UpdateChannel(_BaseEventarcRestTransport._BaseUpdateChannel, EventarcRestStub): + class _UpdateMessageBus(_BaseEventarcRestTransport._BaseUpdateMessageBus, EventarcRestStub): def __hash__(self): - return hash("EventarcRestTransport.UpdateChannel") + return hash("EventarcRestTransport.UpdateMessageBus") @staticmethod def _get_response( @@ -3095,17 +6617,17 @@ def _get_response( return response def __call__(self, - request: eventarc.UpdateChannelRequest, *, + request: eventarc.UpdateMessageBusRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: Optional[float]=None, metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), ) -> operations_pb2.Operation: - r"""Call the update channel method over HTTP. + r"""Call the update message bus method over HTTP. Args: - request (~.eventarc.UpdateChannelRequest): + request (~.eventarc.UpdateMessageBusRequest): The request object. The request message for the - UpdateChannel method. + UpdateMessageBus method. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
@@ -3122,15 +6644,15 @@ def __call__(self, """ - http_options = _BaseEventarcRestTransport._BaseUpdateChannel._get_http_options() + http_options = _BaseEventarcRestTransport._BaseUpdateMessageBus._get_http_options() - request, metadata = self._interceptor.pre_update_channel(request, metadata) - transcoded_request = _BaseEventarcRestTransport._BaseUpdateChannel._get_transcoded_request(http_options, request) + request, metadata = self._interceptor.pre_update_message_bus(request, metadata) + transcoded_request = _BaseEventarcRestTransport._BaseUpdateMessageBus._get_transcoded_request(http_options, request) - body = _BaseEventarcRestTransport._BaseUpdateChannel._get_request_body_json(transcoded_request) + body = _BaseEventarcRestTransport._BaseUpdateMessageBus._get_request_body_json(transcoded_request) # Jsonify the query params - query_params = _BaseEventarcRestTransport._BaseUpdateChannel._get_query_params_json(transcoded_request) + query_params = _BaseEventarcRestTransport._BaseUpdateMessageBus._get_query_params_json(transcoded_request) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) @@ -3146,17 +6668,17 @@ def __call__(self, "headers": dict(metadata), } _LOGGER.debug( - f"Sending request for google.cloud.eventarc_v1.EventarcClient.UpdateChannel", + f"Sending request for google.cloud.eventarc_v1.EventarcClient.UpdateMessageBus", extra = { "serviceName": "google.cloud.eventarc.v1.Eventarc", - "rpcName": "UpdateChannel", + "rpcName": "UpdateMessageBus", "httpRequest": http_request, "metadata": http_request["headers"], }, ) # Send the request - response = EventarcRestTransport._UpdateChannel._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + response = EventarcRestTransport._UpdateMessageBus._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
@@ -3167,9 +6689,9 @@ def __call__(self, resp = operations_pb2.Operation() json_format.Parse(response.content, resp, ignore_unknown_fields=True) - resp = self._interceptor.post_update_channel(resp) + resp = self._interceptor.post_update_message_bus(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_update_channel_with_metadata(resp, response_metadata) + resp, _ = self._interceptor.post_update_message_bus_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER try: response_payload = json_format.MessageToJson(resp) @@ -3181,19 +6703,19 @@ def __call__(self, "status": response.status_code, } _LOGGER.debug( - "Received response for google.cloud.eventarc_v1.EventarcClient.update_channel", + "Received response for google.cloud.eventarc_v1.EventarcClient.update_message_bus", extra = { "serviceName": "google.cloud.eventarc.v1.Eventarc", - "rpcName": "UpdateChannel", + "rpcName": "UpdateMessageBus", "metadata": http_response["headers"], "httpResponse": http_response, }, ) return resp - class _UpdateGoogleChannelConfig(_BaseEventarcRestTransport._BaseUpdateGoogleChannelConfig, EventarcRestStub): + class _UpdatePipeline(_BaseEventarcRestTransport._BaseUpdatePipeline, EventarcRestStub): def __hash__(self): - return hash("EventarcRestTransport.UpdateGoogleChannelConfig") + return hash("EventarcRestTransport.UpdatePipeline") @staticmethod def _get_response( @@ -3219,18 +6741,17 @@ def _get_response( return response def __call__(self, - request: eventarc.UpdateGoogleChannelConfigRequest, *, + request: eventarc.UpdatePipelineRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: Optional[float]=None, metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> gce_google_channel_config.GoogleChannelConfig: - r"""Call the update google channel - config method over HTTP. + ) -> operations_pb2.Operation: + r"""Call the update pipeline method over HTTP. Args: - request (~.eventarc.UpdateGoogleChannelConfigRequest): + request (~.eventarc.UpdatePipelineRequest): The request object. The request message for the - UpdateGoogleChannelConfig method. + UpdatePipeline method. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -3240,33 +6761,28 @@ def __call__(self, be of type `bytes`. Returns: - ~.gce_google_channel_config.GoogleChannelConfig: - A GoogleChannelConfig is a resource - that stores the custom settings - respected by Eventarc first-party - triggers in the matching region. Once - configured, first-party event data will - be protected using the specified custom - managed encryption key instead of - Google-managed encryption keys. + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
""" - http_options = _BaseEventarcRestTransport._BaseUpdateGoogleChannelConfig._get_http_options() + http_options = _BaseEventarcRestTransport._BaseUpdatePipeline._get_http_options() - request, metadata = self._interceptor.pre_update_google_channel_config(request, metadata) - transcoded_request = _BaseEventarcRestTransport._BaseUpdateGoogleChannelConfig._get_transcoded_request(http_options, request) + request, metadata = self._interceptor.pre_update_pipeline(request, metadata) + transcoded_request = _BaseEventarcRestTransport._BaseUpdatePipeline._get_transcoded_request(http_options, request) - body = _BaseEventarcRestTransport._BaseUpdateGoogleChannelConfig._get_request_body_json(transcoded_request) + body = _BaseEventarcRestTransport._BaseUpdatePipeline._get_request_body_json(transcoded_request) # Jsonify the query params - query_params = _BaseEventarcRestTransport._BaseUpdateGoogleChannelConfig._get_query_params_json(transcoded_request) + query_params = _BaseEventarcRestTransport._BaseUpdatePipeline._get_query_params_json(transcoded_request) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] try: - request_payload = type(request).to_json(request) + request_payload = json_format.MessageToJson(request) except: request_payload = None http_request = { @@ -3276,17 +6792,17 @@ def __call__(self, "headers": dict(metadata), } _LOGGER.debug( - f"Sending request for google.cloud.eventarc_v1.EventarcClient.UpdateGoogleChannelConfig", + f"Sending request for google.cloud.eventarc_v1.EventarcClient.UpdatePipeline", extra = { "serviceName": "google.cloud.eventarc.v1.Eventarc", - "rpcName": "UpdateGoogleChannelConfig", + "rpcName": "UpdatePipeline", "httpRequest": http_request, "metadata": http_request["headers"], }, ) # Send the request - response = EventarcRestTransport._UpdateGoogleChannelConfig._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + response = EventarcRestTransport._UpdatePipeline._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
@@ -3294,17 +6810,15 @@ def __call__(self, raise core_exceptions.from_http_response(response) # Return the response - resp = gce_google_channel_config.GoogleChannelConfig() - pb_resp = gce_google_channel_config.GoogleChannelConfig.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) - resp = self._interceptor.post_update_google_channel_config(resp) + resp = self._interceptor.post_update_pipeline(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_update_google_channel_config_with_metadata(resp, response_metadata) + resp, _ = self._interceptor.post_update_pipeline_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER try: - response_payload = gce_google_channel_config.GoogleChannelConfig.to_json(response) + response_payload = json_format.MessageToJson(resp) except: response_payload = None http_response = { @@ -3313,10 +6827,10 @@ def __call__(self, "status": response.status_code, } _LOGGER.debug( - "Received response for google.cloud.eventarc_v1.EventarcClient.update_google_channel_config", + "Received response for google.cloud.eventarc_v1.EventarcClient.update_pipeline", extra = { "serviceName": "google.cloud.eventarc.v1.Eventarc", - "rpcName": "UpdateGoogleChannelConfig", + "rpcName": "UpdatePipeline", "metadata": http_response["headers"], "httpResponse": http_response, }, @@ -3463,6 +6977,38 @@ def create_channel_connection(self) -> Callable[ # In C++ this would require a dynamic_cast return self._CreateChannelConnection(self._session, self._host, self._interceptor) # type: ignore + @property + def create_enrollment(self) -> Callable[ + [eventarc.CreateEnrollmentRequest], + operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateEnrollment(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_google_api_source(self) -> Callable[ + [eventarc.CreateGoogleApiSourceRequest], + operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateGoogleApiSource(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_message_bus(self) -> Callable[ + [eventarc.CreateMessageBusRequest], + operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateMessageBus(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_pipeline(self) -> Callable[ + [eventarc.CreatePipelineRequest], + operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._CreatePipeline(self._session, self._host, self._interceptor) # type: ignore + @property def create_trigger(self) -> Callable[ [eventarc.CreateTriggerRequest], @@ -3487,6 +7033,38 @@ def delete_channel_connection(self) -> Callable[ # In C++ this would require a dynamic_cast return self._DeleteChannelConnection(self._session, self._host, self._interceptor) # type: ignore + @property + def delete_enrollment(self) -> Callable[ + [eventarc.DeleteEnrollmentRequest], + operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteEnrollment(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_google_api_source(self) -> Callable[ + [eventarc.DeleteGoogleApiSourceRequest], + operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteGoogleApiSource(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_message_bus(self) -> Callable[ + [eventarc.DeleteMessageBusRequest], + operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteMessageBus(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_pipeline(self) -> Callable[ + [eventarc.DeletePipelineRequest], + operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeletePipeline(self._session, self._host, self._interceptor) # type: ignore + @property def delete_trigger(self) -> Callable[ [eventarc.DeleteTriggerRequest], @@ -3511,6 +7089,22 @@ def get_channel_connection(self) -> Callable[ # In C++ this would require a dynamic_cast return self._GetChannelConnection(self._session, self._host, self._interceptor) # type: ignore + @property + def get_enrollment(self) -> Callable[ + [eventarc.GetEnrollmentRequest], + enrollment.Enrollment]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetEnrollment(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_google_api_source(self) -> Callable[ + [eventarc.GetGoogleApiSourceRequest], + google_api_source.GoogleApiSource]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetGoogleApiSource(self._session, self._host, self._interceptor) # type: ignore + @property def get_google_channel_config(self) -> Callable[ [eventarc.GetGoogleChannelConfigRequest], @@ -3519,6 +7113,22 @@ def get_google_channel_config(self) -> Callable[ # In C++ this would require a dynamic_cast return self._GetGoogleChannelConfig(self._session, self._host, self._interceptor) # type: ignore + @property + def get_message_bus(self) -> Callable[ + [eventarc.GetMessageBusRequest], + message_bus.MessageBus]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._GetMessageBus(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_pipeline(self) -> Callable[ + [eventarc.GetPipelineRequest], + pipeline.Pipeline]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetPipeline(self._session, self._host, self._interceptor) # type: ignore + @property def get_provider(self) -> Callable[ [eventarc.GetProviderRequest], @@ -3551,6 +7161,46 @@ def list_channels(self) -> Callable[ # In C++ this would require a dynamic_cast return self._ListChannels(self._session, self._host, self._interceptor) # type: ignore + @property + def list_enrollments(self) -> Callable[ + [eventarc.ListEnrollmentsRequest], + eventarc.ListEnrollmentsResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListEnrollments(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_google_api_sources(self) -> Callable[ + [eventarc.ListGoogleApiSourcesRequest], + eventarc.ListGoogleApiSourcesResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListGoogleApiSources(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_message_bus_enrollments(self) -> Callable[ + [eventarc.ListMessageBusEnrollmentsRequest], + eventarc.ListMessageBusEnrollmentsResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListMessageBusEnrollments(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_message_buses(self) -> Callable[ + [eventarc.ListMessageBusesRequest], + eventarc.ListMessageBusesResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListMessageBuses(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_pipelines(self) -> Callable[ + [eventarc.ListPipelinesRequest], + eventarc.ListPipelinesResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListPipelines(self._session, self._host, self._interceptor) # type: ignore + @property def list_providers(self) -> Callable[ [eventarc.ListProvidersRequest], @@ -3575,6 +7225,22 @@ def update_channel(self) -> Callable[ # In C++ this would require a dynamic_cast return self._UpdateChannel(self._session, self._host, self._interceptor) # type: ignore + @property + def update_enrollment(self) -> Callable[ + [eventarc.UpdateEnrollmentRequest], + operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateEnrollment(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_google_api_source(self) -> Callable[ + [eventarc.UpdateGoogleApiSourceRequest], + operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._UpdateGoogleApiSource(self._session, self._host, self._interceptor) # type: ignore + @property def update_google_channel_config(self) -> Callable[ [eventarc.UpdateGoogleChannelConfigRequest], @@ -3583,6 +7249,22 @@ def update_google_channel_config(self) -> Callable[ # In C++ this would require a dynamic_cast return self._UpdateGoogleChannelConfig(self._session, self._host, self._interceptor) # type: ignore + @property + def update_message_bus(self) -> Callable[ + [eventarc.UpdateMessageBusRequest], + operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateMessageBus(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_pipeline(self) -> Callable[ + [eventarc.UpdatePipelineRequest], + operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdatePipeline(self._session, self._host, self._interceptor) # type: ignore + @property def update_trigger(self) -> Callable[ [eventarc.UpdateTriggerRequest], diff --git a/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/rest_base.py b/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/rest_base.py index 6ff0278064..75cb5113f8 100755 --- a/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/rest_base.py +++ b/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/rest_base.py @@ -30,9 +30,13 @@ from google.cloud.eventarc_v1.types import channel from google.cloud.eventarc_v1.types import channel_connection from google.cloud.eventarc_v1.types import discovery +from google.cloud.eventarc_v1.types import enrollment from google.cloud.eventarc_v1.types import eventarc +from google.cloud.eventarc_v1.types import google_api_source from google.cloud.eventarc_v1.types import google_channel_config from google.cloud.eventarc_v1.types import google_channel_config as gce_google_channel_config +from google.cloud.eventarc_v1.types import message_bus +from google.cloud.eventarc_v1.types import pipeline from google.cloud.eventarc_v1.types import trigger from google.longrunning import operations_pb2 # type: ignore @@ -100,7 +104,7 @@ def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - "channelId" : "", "validateOnly" : False, } + "channelId" : "", } @classmethod def _get_unset_required_fields(cls, message_dict): @@ -187,12 +191,674 @@ def _get_query_params_json(transcoded_request): return query_params + class _BaseCreateEnrollment: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "enrollmentId" : "", } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/{parent=projects/*/locations/*}/enrollments', + 'body': 'enrollment', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + 
pb_request = eventarc.CreateEnrollmentRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=False + ) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=False, + )) + query_params.update(_BaseEventarcRestTransport._BaseCreateEnrollment._get_unset_required_fields(query_params)) + + return query_params + + class _BaseCreateGoogleApiSource: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "googleApiSourceId" : "", } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/{parent=projects/*/locations/*}/googleApiSources', + 'body': 'google_api_source', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = eventarc.CreateGoogleApiSourceRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=False + ) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=False, + )) + query_params.update(_BaseEventarcRestTransport._BaseCreateGoogleApiSource._get_unset_required_fields(query_params)) + + return query_params + + class _BaseCreateMessageBus: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "messageBusId" : "", } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/{parent=projects/*/locations/*}/messageBuses', + 'body': 'message_bus', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = eventarc.CreateMessageBusRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=False + ) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=False, + )) + query_params.update(_BaseEventarcRestTransport._BaseCreateMessageBus._get_unset_required_fields(query_params)) + + return query_params + + class _BaseCreatePipeline: + def 
__hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "pipelineId" : "", } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/{parent=projects/*/locations/*}/pipelines', + 'body': 'pipeline', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = eventarc.CreatePipelineRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=False + ) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=False, + )) + query_params.update(_BaseEventarcRestTransport._BaseCreatePipeline._get_unset_required_fields(query_params)) + + return query_params + class _BaseCreateTrigger: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - "triggerId" : "", "validateOnly" : False, } + "triggerId" : "", } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/{parent=projects/*/locations/*}/triggers', + 'body': 'trigger', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = eventarc.CreateTriggerRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=False + ) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=False, + )) + query_params.update(_BaseEventarcRestTransport._BaseCreateTrigger._get_unset_required_fields(query_params)) + + return query_params + + class _BaseDeleteChannel: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'delete', + 'uri': '/v1/{name=projects/*/locations/*/channels/*}', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = eventarc.DeleteChannelRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + 
def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=False, + )) + query_params.update(_BaseEventarcRestTransport._BaseDeleteChannel._get_unset_required_fields(query_params)) + + return query_params + + class _BaseDeleteChannelConnection: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'delete', + 'uri': '/v1/{name=projects/*/locations/*/channelConnections/*}', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = eventarc.DeleteChannelConnectionRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=False, + )) + query_params.update(_BaseEventarcRestTransport._BaseDeleteChannelConnection._get_unset_required_fields(query_params)) + + return query_params + + class _BaseDeleteEnrollment: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'delete', + 'uri': '/v1/{name=projects/*/locations/*/enrollments/*}', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = eventarc.DeleteEnrollmentRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=False, + )) + query_params.update(_BaseEventarcRestTransport._BaseDeleteEnrollment._get_unset_required_fields(query_params)) + + return query_params + + class _BaseDeleteGoogleApiSource: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'delete', + 'uri': '/v1/{name=projects/*/locations/*/googleApiSources/*}', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = eventarc.DeleteGoogleApiSourceRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + 
transcoded_request['query_params'], + use_integers_for_enums=False, + )) + query_params.update(_BaseEventarcRestTransport._BaseDeleteGoogleApiSource._get_unset_required_fields(query_params)) + + return query_params + + class _BaseDeleteMessageBus: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'delete', + 'uri': '/v1/{name=projects/*/locations/*/messageBuses/*}', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = eventarc.DeleteMessageBusRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=False, + )) + query_params.update(_BaseEventarcRestTransport._BaseDeleteMessageBus._get_unset_required_fields(query_params)) + + return query_params + + class _BaseDeletePipeline: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'delete', + 'uri': '/v1/{name=projects/*/locations/*/pipelines/*}', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = eventarc.DeletePipelineRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=False, + )) + query_params.update(_BaseEventarcRestTransport._BaseDeletePipeline._get_unset_required_fields(query_params)) + + return query_params + + class _BaseDeleteTrigger: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'delete', + 'uri': '/v1/{name=projects/*/locations/*/triggers/*}', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = eventarc.DeleteTriggerRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=False, + )) + 
query_params.update(_BaseEventarcRestTransport._BaseDeleteTrigger._get_unset_required_fields(query_params)) + + return query_params + + class _BaseGetChannel: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{name=projects/*/locations/*/channels/*}', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = eventarc.GetChannelRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=False, + )) + query_params.update(_BaseEventarcRestTransport._BaseGetChannel._get_unset_required_fields(query_params)) + + return query_params + + class _BaseGetChannelConnection: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{name=projects/*/locations/*/channelConnections/*}', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = eventarc.GetChannelConnectionRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=False, + )) + query_params.update(_BaseEventarcRestTransport._BaseGetChannelConnection._get_unset_required_fields(query_params)) + + return query_params + + class _BaseGetEnrollment: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{name=projects/*/locations/*/enrollments/*}', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = eventarc.GetEnrollmentRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=False, + )) + query_params.update(_BaseEventarcRestTransport._BaseGetEnrollment._get_unset_required_fields(query_params)) + + return query_params + + class _BaseGetGoogleApiSource: + def 
__hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{name=projects/*/locations/*/googleApiSources/*}', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = eventarc.GetGoogleApiSourceRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=False, + )) + query_params.update(_BaseEventarcRestTransport._BaseGetGoogleApiSource._get_unset_required_fields(query_params)) + + return query_params + + class _BaseGetGoogleChannelConfig: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{name=projects/*/locations/*/googleChannelConfig}', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = eventarc.GetGoogleChannelConfigRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=False, + )) + query_params.update(_BaseEventarcRestTransport._BaseGetGoogleChannelConfig._get_unset_required_fields(query_params)) + + return query_params + + class _BaseGetMessageBus: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } @classmethod def _get_unset_required_fields(cls, message_dict): @@ -201,44 +867,34 @@ def _get_unset_required_fields(cls, message_dict): @staticmethod def _get_http_options(): http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{parent=projects/*/locations/*}/triggers', - 'body': 'trigger', + 'method': 'get', + 'uri': '/v1/{name=projects/*/locations/*/messageBuses/*}', }, ] return http_options @staticmethod def _get_transcoded_request(http_options, request): - pb_request = eventarc.CreateTriggerRequest.pb(request) + pb_request = eventarc.GetMessageBusRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) return transcoded_request @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=False - ) - return body - @staticmethod def _get_query_params_json(transcoded_request): query_params = json.loads(json_format.MessageToJson( transcoded_request['query_params'], use_integers_for_enums=False, )) - 
query_params.update(_BaseEventarcRestTransport._BaseCreateTrigger._get_unset_required_fields(query_params)) + query_params.update(_BaseEventarcRestTransport._BaseGetMessageBus._get_unset_required_fields(query_params)) return query_params - class _BaseDeleteChannel: + class _BaseGetPipeline: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - "validateOnly" : False, } + } @classmethod def _get_unset_required_fields(cls, message_dict): @@ -247,15 +903,15 @@ def _get_unset_required_fields(cls, message_dict): @staticmethod def _get_http_options(): http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v1/{name=projects/*/locations/*/channels/*}', + 'method': 'get', + 'uri': '/v1/{name=projects/*/locations/*/pipelines/*}', }, ] return http_options @staticmethod def _get_transcoded_request(http_options, request): - pb_request = eventarc.DeleteChannelRequest.pb(request) + pb_request = eventarc.GetPipelineRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) return transcoded_request @@ -265,11 +921,11 @@ def _get_query_params_json(transcoded_request): transcoded_request['query_params'], use_integers_for_enums=False, )) - query_params.update(_BaseEventarcRestTransport._BaseDeleteChannel._get_unset_required_fields(query_params)) + query_params.update(_BaseEventarcRestTransport._BaseGetPipeline._get_unset_required_fields(query_params)) return query_params - class _BaseDeleteChannelConnection: + class _BaseGetProvider: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") @@ -283,15 +939,15 @@ def _get_unset_required_fields(cls, message_dict): @staticmethod def _get_http_options(): http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v1/{name=projects/*/locations/*/channelConnections/*}', + 'method': 'get', + 'uri': '/v1/{name=projects/*/locations/*/providers/*}', }, ] return http_options @staticmethod def _get_transcoded_request(http_options, request): - pb_request = eventarc.DeleteChannelConnectionRequest.pb(request) + pb_request = eventarc.GetProviderRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) return transcoded_request @@ -301,16 +957,16 @@ def _get_query_params_json(transcoded_request): transcoded_request['query_params'], use_integers_for_enums=False, )) - query_params.update(_BaseEventarcRestTransport._BaseDeleteChannelConnection._get_unset_required_fields(query_params)) + query_params.update(_BaseEventarcRestTransport._BaseGetProvider._get_unset_required_fields(query_params)) return query_params - class _BaseDeleteTrigger: + class _BaseGetTrigger: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - "validateOnly" : False, } + } @classmethod def _get_unset_required_fields(cls, message_dict): @@ -319,7 +975,7 @@ def _get_unset_required_fields(cls, message_dict): @staticmethod def _get_http_options(): http_options: List[Dict[str, str]] = [{ - 'method': 'delete', + 'method': 'get', 'uri': '/v1/{name=projects/*/locations/*/triggers/*}', }, ] @@ -327,7 +983,7 @@ def _get_http_options(): @staticmethod def _get_transcoded_request(http_options, request): - pb_request = eventarc.DeleteTriggerRequest.pb(request) + pb_request = eventarc.GetTriggerRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) return 
transcoded_request @@ -337,11 +993,11 @@ def _get_query_params_json(transcoded_request): transcoded_request['query_params'], use_integers_for_enums=False, )) - query_params.update(_BaseEventarcRestTransport._BaseDeleteTrigger._get_unset_required_fields(query_params)) + query_params.update(_BaseEventarcRestTransport._BaseGetTrigger._get_unset_required_fields(query_params)) return query_params - class _BaseGetChannel: + class _BaseListChannelConnections: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") @@ -356,14 +1012,14 @@ def _get_unset_required_fields(cls, message_dict): def _get_http_options(): http_options: List[Dict[str, str]] = [{ 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*/channels/*}', + 'uri': '/v1/{parent=projects/*/locations/*}/channelConnections', }, ] return http_options @staticmethod def _get_transcoded_request(http_options, request): - pb_request = eventarc.GetChannelRequest.pb(request) + pb_request = eventarc.ListChannelConnectionsRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) return transcoded_request @@ -373,11 +1029,11 @@ def _get_query_params_json(transcoded_request): transcoded_request['query_params'], use_integers_for_enums=False, )) - query_params.update(_BaseEventarcRestTransport._BaseGetChannel._get_unset_required_fields(query_params)) + query_params.update(_BaseEventarcRestTransport._BaseListChannelConnections._get_unset_required_fields(query_params)) return query_params - class _BaseGetChannelConnection: + class _BaseListChannels: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") @@ -392,14 +1048,14 @@ def _get_unset_required_fields(cls, message_dict): def _get_http_options(): http_options: List[Dict[str, str]] = [{ 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*/channelConnections/*}', + 'uri': '/v1/{parent=projects/*/locations/*}/channels', }, ] return http_options @staticmethod def _get_transcoded_request(http_options, request): - pb_request = eventarc.GetChannelConnectionRequest.pb(request) + pb_request = eventarc.ListChannelsRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) return transcoded_request @@ -409,11 +1065,11 @@ def _get_query_params_json(transcoded_request): transcoded_request['query_params'], use_integers_for_enums=False, )) - query_params.update(_BaseEventarcRestTransport._BaseGetChannelConnection._get_unset_required_fields(query_params)) + query_params.update(_BaseEventarcRestTransport._BaseListChannels._get_unset_required_fields(query_params)) return query_params - class _BaseGetGoogleChannelConfig: + class _BaseListEnrollments: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") @@ -428,14 +1084,14 @@ def _get_unset_required_fields(cls, message_dict): def _get_http_options(): http_options: List[Dict[str, str]] = [{ 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*/googleChannelConfig}', + 'uri': '/v1/{parent=projects/*/locations/*}/enrollments', }, ] return http_options @staticmethod def _get_transcoded_request(http_options, request): - pb_request = eventarc.GetGoogleChannelConfigRequest.pb(request) + pb_request = eventarc.ListEnrollmentsRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) return transcoded_request @@ -445,11 +1101,11 @@ def _get_query_params_json(transcoded_request): transcoded_request['query_params'], 
use_integers_for_enums=False, )) - query_params.update(_BaseEventarcRestTransport._BaseGetGoogleChannelConfig._get_unset_required_fields(query_params)) + query_params.update(_BaseEventarcRestTransport._BaseListEnrollments._get_unset_required_fields(query_params)) return query_params - class _BaseGetProvider: + class _BaseListGoogleApiSources: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") @@ -464,14 +1120,14 @@ def _get_unset_required_fields(cls, message_dict): def _get_http_options(): http_options: List[Dict[str, str]] = [{ 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*/providers/*}', + 'uri': '/v1/{parent=projects/*/locations/*}/googleApiSources', }, ] return http_options @staticmethod def _get_transcoded_request(http_options, request): - pb_request = eventarc.GetProviderRequest.pb(request) + pb_request = eventarc.ListGoogleApiSourcesRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) return transcoded_request @@ -481,11 +1137,11 @@ def _get_query_params_json(transcoded_request): transcoded_request['query_params'], use_integers_for_enums=False, )) - query_params.update(_BaseEventarcRestTransport._BaseGetProvider._get_unset_required_fields(query_params)) + query_params.update(_BaseEventarcRestTransport._BaseListGoogleApiSources._get_unset_required_fields(query_params)) return query_params - class _BaseGetTrigger: + class _BaseListMessageBusEnrollments: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") @@ -500,14 +1156,14 @@ def _get_unset_required_fields(cls, message_dict): def _get_http_options(): http_options: List[Dict[str, str]] = [{ 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*/triggers/*}', + 'uri': '/v1/{parent=projects/*/locations/*/messageBuses/*}:listEnrollments', }, ] return http_options @staticmethod def _get_transcoded_request(http_options, request): - pb_request = eventarc.GetTriggerRequest.pb(request) + pb_request = eventarc.ListMessageBusEnrollmentsRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) return transcoded_request @@ -517,11 +1173,11 @@ def _get_query_params_json(transcoded_request): transcoded_request['query_params'], use_integers_for_enums=False, )) - query_params.update(_BaseEventarcRestTransport._BaseGetTrigger._get_unset_required_fields(query_params)) + query_params.update(_BaseEventarcRestTransport._BaseListMessageBusEnrollments._get_unset_required_fields(query_params)) return query_params - class _BaseListChannelConnections: + class _BaseListMessageBuses: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") @@ -536,14 +1192,14 @@ def _get_unset_required_fields(cls, message_dict): def _get_http_options(): http_options: List[Dict[str, str]] = [{ 'method': 'get', - 'uri': '/v1/{parent=projects/*/locations/*}/channelConnections', + 'uri': '/v1/{parent=projects/*/locations/*}/messageBuses', }, ] return http_options @staticmethod def _get_transcoded_request(http_options, request): - pb_request = eventarc.ListChannelConnectionsRequest.pb(request) + pb_request = eventarc.ListMessageBusesRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) return transcoded_request @@ -553,11 +1209,11 @@ def _get_query_params_json(transcoded_request): transcoded_request['query_params'], use_integers_for_enums=False, )) - 
query_params.update(_BaseEventarcRestTransport._BaseListChannelConnections._get_unset_required_fields(query_params)) + query_params.update(_BaseEventarcRestTransport._BaseListMessageBuses._get_unset_required_fields(query_params)) return query_params - class _BaseListChannels: + class _BaseListPipelines: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") @@ -572,14 +1228,14 @@ def _get_unset_required_fields(cls, message_dict): def _get_http_options(): http_options: List[Dict[str, str]] = [{ 'method': 'get', - 'uri': '/v1/{parent=projects/*/locations/*}/channels', + 'uri': '/v1/{parent=projects/*/locations/*}/pipelines', }, ] return http_options @staticmethod def _get_transcoded_request(http_options, request): - pb_request = eventarc.ListChannelsRequest.pb(request) + pb_request = eventarc.ListPipelinesRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) return transcoded_request @@ -589,7 +1245,7 @@ def _get_query_params_json(transcoded_request): transcoded_request['query_params'], use_integers_for_enums=False, )) - query_params.update(_BaseEventarcRestTransport._BaseListChannels._get_unset_required_fields(query_params)) + query_params.update(_BaseEventarcRestTransport._BaseListPipelines._get_unset_required_fields(query_params)) return query_params @@ -669,8 +1325,46 @@ class _BaseUpdateChannel: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'patch', + 'uri': '/v1/{channel.name=projects/*/locations/*/channels/*}', + 'body': 'channel', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = eventarc.UpdateChannelRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=False + ) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=False, + )) + + return query_params + + class _BaseUpdateEnrollment: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - "validateOnly" : False, } + } @classmethod def _get_unset_required_fields(cls, message_dict): @@ -680,15 +1374,61 @@ def _get_unset_required_fields(cls, message_dict): def _get_http_options(): http_options: List[Dict[str, str]] = [{ 'method': 'patch', - 'uri': '/v1/{channel.name=projects/*/locations/*/channels/*}', - 'body': 'channel', + 'uri': '/v1/{enrollment.name=projects/*/locations/*/enrollments/*}', + 'body': 'enrollment', }, ] return http_options @staticmethod def _get_transcoded_request(http_options, request): - pb_request = eventarc.UpdateChannelRequest.pb(request) + pb_request = eventarc.UpdateEnrollmentRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=False + ) + return body + @staticmethod + 
def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=False, + )) + query_params.update(_BaseEventarcRestTransport._BaseUpdateEnrollment._get_unset_required_fields(query_params)) + + return query_params + + class _BaseUpdateGoogleApiSource: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'patch', + 'uri': '/v1/{google_api_source.name=projects/*/locations/*/googleApiSources/*}', + 'body': 'google_api_source', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = eventarc.UpdateGoogleApiSourceRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) return transcoded_request @@ -707,7 +1447,7 @@ def _get_query_params_json(transcoded_request): transcoded_request['query_params'], use_integers_for_enums=False, )) - query_params.update(_BaseEventarcRestTransport._BaseUpdateChannel._get_unset_required_fields(query_params)) + query_params.update(_BaseEventarcRestTransport._BaseUpdateGoogleApiSource._get_unset_required_fields(query_params)) return query_params @@ -757,17 +1497,102 @@ def _get_query_params_json(transcoded_request): return query_params - class _BaseUpdateTrigger: + class _BaseUpdateMessageBus: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'patch', + 'uri': '/v1/{message_bus.name=projects/*/locations/*/messageBuses/*}', + 'body': 'message_bus', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = eventarc.UpdateMessageBusRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=False + ) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=False, + )) + query_params.update(_BaseEventarcRestTransport._BaseUpdateMessageBus._get_unset_required_fields(query_params)) + + return query_params + + class _BaseUpdatePipeline: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - "validateOnly" : False, } + } @classmethod def _get_unset_required_fields(cls, message_dict): return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'patch', + 'uri': 
'/v1/{pipeline.name=projects/*/locations/*/pipelines/*}', + 'body': 'pipeline', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = eventarc.UpdatePipelineRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=False + ) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=False, + )) + query_params.update(_BaseEventarcRestTransport._BaseUpdatePipeline._get_unset_required_fields(query_params)) + + return query_params + + class _BaseUpdateTrigger: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + @staticmethod def _get_http_options(): http_options: List[Dict[str, str]] = [{ @@ -799,7 +1624,6 @@ def _get_query_params_json(transcoded_request): transcoded_request['query_params'], use_integers_for_enums=False, )) - query_params.update(_BaseEventarcRestTransport._BaseUpdateTrigger._get_unset_required_fields(query_params)) return query_params diff --git a/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/__init__.py b/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/__init__.py index 0521e2f2f2..87e8dd3950 100755 --- a/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/__init__.py +++ b/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/__init__.py @@ -24,39 +24,84 @@ FilteringAttribute, Provider, ) +from .enrollment import ( + Enrollment, +) from .eventarc import ( CreateChannelConnectionRequest, CreateChannelRequest, + CreateEnrollmentRequest, + CreateGoogleApiSourceRequest, + CreateMessageBusRequest, + CreatePipelineRequest, CreateTriggerRequest, DeleteChannelConnectionRequest, DeleteChannelRequest, + DeleteEnrollmentRequest, + DeleteGoogleApiSourceRequest, + DeleteMessageBusRequest, + DeletePipelineRequest, DeleteTriggerRequest, GetChannelConnectionRequest, GetChannelRequest, + GetEnrollmentRequest, + GetGoogleApiSourceRequest, GetGoogleChannelConfigRequest, + GetMessageBusRequest, + GetPipelineRequest, GetProviderRequest, GetTriggerRequest, ListChannelConnectionsRequest, ListChannelConnectionsResponse, ListChannelsRequest, ListChannelsResponse, + ListEnrollmentsRequest, + ListEnrollmentsResponse, + ListGoogleApiSourcesRequest, + ListGoogleApiSourcesResponse, + ListMessageBusEnrollmentsRequest, + ListMessageBusEnrollmentsResponse, + ListMessageBusesRequest, + ListMessageBusesResponse, + ListPipelinesRequest, + ListPipelinesResponse, ListProvidersRequest, ListProvidersResponse, ListTriggersRequest, ListTriggersResponse, OperationMetadata, UpdateChannelRequest, + UpdateEnrollmentRequest, + UpdateGoogleApiSourceRequest, UpdateGoogleChannelConfigRequest, + UpdateMessageBusRequest, + UpdatePipelineRequest, UpdateTriggerRequest, ) +from .google_api_source import ( + GoogleApiSource, +) from .google_channel_config import ( GoogleChannelConfig, ) +from .logging_config import ( + LoggingConfig, +) +from .message_bus import ( + MessageBus, +) +from .network_config import ( + NetworkConfig, +) +from .pipeline import ( + Pipeline, +) from .trigger import ( CloudRun, Destination, EventFilter, GKE, + HttpEndpoint, Pubsub, 
StateCondition, Transport, @@ -69,34 +114,67 @@ 'EventType', 'FilteringAttribute', 'Provider', + 'Enrollment', 'CreateChannelConnectionRequest', 'CreateChannelRequest', + 'CreateEnrollmentRequest', + 'CreateGoogleApiSourceRequest', + 'CreateMessageBusRequest', + 'CreatePipelineRequest', 'CreateTriggerRequest', 'DeleteChannelConnectionRequest', 'DeleteChannelRequest', + 'DeleteEnrollmentRequest', + 'DeleteGoogleApiSourceRequest', + 'DeleteMessageBusRequest', + 'DeletePipelineRequest', 'DeleteTriggerRequest', 'GetChannelConnectionRequest', 'GetChannelRequest', + 'GetEnrollmentRequest', + 'GetGoogleApiSourceRequest', 'GetGoogleChannelConfigRequest', + 'GetMessageBusRequest', + 'GetPipelineRequest', 'GetProviderRequest', 'GetTriggerRequest', 'ListChannelConnectionsRequest', 'ListChannelConnectionsResponse', 'ListChannelsRequest', 'ListChannelsResponse', + 'ListEnrollmentsRequest', + 'ListEnrollmentsResponse', + 'ListGoogleApiSourcesRequest', + 'ListGoogleApiSourcesResponse', + 'ListMessageBusEnrollmentsRequest', + 'ListMessageBusEnrollmentsResponse', + 'ListMessageBusesRequest', + 'ListMessageBusesResponse', + 'ListPipelinesRequest', + 'ListPipelinesResponse', 'ListProvidersRequest', 'ListProvidersResponse', 'ListTriggersRequest', 'ListTriggersResponse', 'OperationMetadata', 'UpdateChannelRequest', + 'UpdateEnrollmentRequest', + 'UpdateGoogleApiSourceRequest', 'UpdateGoogleChannelConfigRequest', + 'UpdateMessageBusRequest', + 'UpdatePipelineRequest', 'UpdateTriggerRequest', + 'GoogleApiSource', 'GoogleChannelConfig', + 'LoggingConfig', + 'MessageBus', + 'NetworkConfig', + 'Pipeline', 'CloudRun', 'Destination', 'EventFilter', 'GKE', + 'HttpEndpoint', 'Pubsub', 'StateCondition', 'Transport', diff --git a/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/channel.py b/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/channel.py index 83fdc73ae3..05f3c75e1c 100755 --- a/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/channel.py +++ b/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/channel.py @@ -84,6 +84,12 @@ class Channel(proto.Message): It must match the pattern `projects/*/locations/*/keyRings/*/cryptoKeys/*`. + satisfies_pzs (bool): + Output only. Whether or not this Channel + satisfies the requirements of physical zone + separation + labels (MutableMapping[str, str]): + Optional. Resource labels. """ class State(proto.Enum): r"""State lists all the possible states of a Channel @@ -161,6 +167,15 @@ class State(proto.Enum): proto.STRING, number=11, ) + satisfies_pzs: bool = proto.Field( + proto.BOOL, + number=12, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=13, + ) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/channel_connection.py b/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/channel_connection.py index 26adbed388..82eadc529d 100755 --- a/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/channel_connection.py +++ b/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/channel_connection.py @@ -60,6 +60,8 @@ class ChannelConnection(proto.Message): ChannelConnection to bind the channel with the provider project. This field will not be stored in the provider resource. + labels (MutableMapping[str, str]): + Optional. Resource labels. 
""" name: str = proto.Field( @@ -88,6 +90,11 @@ class ChannelConnection(proto.Message): proto.STRING, number=8, ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=9, + ) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/enrollment.py b/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/enrollment.py new file mode 100755 index 0000000000..418badd82c --- /dev/null +++ b/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/enrollment.py @@ -0,0 +1,130 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.protobuf import timestamp_pb2 # type: ignore + + +__protobuf__ = proto.module( + package='google.cloud.eventarc.v1', + manifest={ + 'Enrollment', + }, +) + + +class Enrollment(proto.Message): + r"""An enrollment represents a subscription for messages on a + particular message bus. It defines a matching criteria for + messages on the bus and the subscriber endpoint where matched + messages should be delivered. + + Attributes: + name (str): + Identifier. Resource name of the form + projects/{project}/locations/{location}/enrollments/{enrollment} + uid (str): + Output only. Server assigned unique + identifier for the channel. The value is a UUID4 + string and guaranteed to remain unchanged until + the resource is deleted. + etag (str): + Output only. This checksum is computed by the + server based on the value of other fields, and + might be sent only on update and delete requests + to ensure that the client has an up-to-date + value before proceeding. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The creation time. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The last-modified time. + labels (MutableMapping[str, str]): + Optional. Resource labels. + annotations (MutableMapping[str, str]): + Optional. Resource annotations. + display_name (str): + Optional. Resource display name. + cel_match (str): + Required. A CEL expression identifying which + messages this enrollment applies to. + message_bus (str): + Required. Immutable. Resource name of the + message bus identifying the source of the + messages. It matches the form + projects/{project}/locations/{location}/messageBuses/{messageBus}. + destination (str): + Required. Destination is the Pipeline that + the Enrollment is delivering to. It must point + to the full resource name of a Pipeline. Format: + + "projects/{PROJECT_ID}/locations/{region}/pipelines/{PIPELINE_ID)". 
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + uid: str = proto.Field( + proto.STRING, + number=2, + ) + etag: str = proto.Field( + proto.STRING, + number=3, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=5, + message=timestamp_pb2.Timestamp, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=6, + ) + annotations: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=7, + ) + display_name: str = proto.Field( + proto.STRING, + number=8, + ) + cel_match: str = proto.Field( + proto.STRING, + number=9, + ) + message_bus: str = proto.Field( + proto.STRING, + number=10, + ) + destination: str = proto.Field( + proto.STRING, + number=11, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/eventarc.py b/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/eventarc.py index 53b08538bc..3b60b430c1 100755 --- a/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/eventarc.py +++ b/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/eventarc.py @@ -22,7 +22,11 @@ from google.cloud.eventarc_v1.types import channel as gce_channel from google.cloud.eventarc_v1.types import channel_connection as gce_channel_connection from google.cloud.eventarc_v1.types import discovery +from google.cloud.eventarc_v1.types import enrollment as gce_enrollment +from google.cloud.eventarc_v1.types import google_api_source as gce_google_api_source from google.cloud.eventarc_v1.types import google_channel_config as gce_google_channel_config +from google.cloud.eventarc_v1.types import message_bus as gce_message_bus +from google.cloud.eventarc_v1.types import pipeline as gce_pipeline from google.cloud.eventarc_v1.types import trigger as gce_trigger from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore @@ -53,6 +57,32 @@ 'DeleteChannelConnectionRequest', 'UpdateGoogleChannelConfigRequest', 'GetGoogleChannelConfigRequest', + 'GetMessageBusRequest', + 'ListMessageBusesRequest', + 'ListMessageBusesResponse', + 'ListMessageBusEnrollmentsRequest', + 'ListMessageBusEnrollmentsResponse', + 'CreateMessageBusRequest', + 'UpdateMessageBusRequest', + 'DeleteMessageBusRequest', + 'GetEnrollmentRequest', + 'ListEnrollmentsRequest', + 'ListEnrollmentsResponse', + 'CreateEnrollmentRequest', + 'UpdateEnrollmentRequest', + 'DeleteEnrollmentRequest', + 'GetPipelineRequest', + 'ListPipelinesRequest', + 'ListPipelinesResponse', + 'CreatePipelineRequest', + 'UpdatePipelineRequest', + 'DeletePipelineRequest', + 'GetGoogleApiSourceRequest', + 'ListGoogleApiSourcesRequest', + 'ListGoogleApiSourcesResponse', + 'CreateGoogleApiSourceRequest', + 'UpdateGoogleApiSourceRequest', + 'DeleteGoogleApiSourceRequest', 'OperationMetadata', }, ) @@ -177,7 +207,7 @@ class CreateTriggerRequest(proto.Message): Required. The user-provided ID to be assigned to the trigger. validate_only (bool): - Required. If set, validate the request and + Optional. If set, validate the request and preview the review, but do not post it. """ @@ -217,7 +247,7 @@ class UpdateTriggerRequest(proto.Message): a new trigger will be created. In this situation, `update_mask` is ignored. validate_only (bool): - Required. 
If set, validate the request and + Optional. If set, validate the request and preview the review, but do not post it. """ @@ -257,7 +287,7 @@ class DeleteTriggerRequest(proto.Message): the request will succeed but no action will be taken on the server. validate_only (bool): - Required. If set, validate the request and + Optional. If set, validate the request and preview the review, but do not post it. """ @@ -388,7 +418,7 @@ class CreateChannelRequest(proto.Message): Required. The user-provided ID to be assigned to the channel. validate_only (bool): - Required. If set, validate the request and + Optional. If set, validate the request and preview the review, but do not post it. """ @@ -424,7 +454,7 @@ class UpdateChannelRequest(proto.Message): request are updated. To update all fields, provide a field mask of "*". validate_only (bool): - Required. If set, validate the request and + Optional. If set, validate the request and preview the review, but do not post it. """ @@ -452,7 +482,7 @@ class DeleteChannelRequest(proto.Message): Required. The name of the channel to be deleted. validate_only (bool): - Required. If set, validate the request and + Optional. If set, validate the request and preview the review, but do not post it. """ @@ -737,6 +767,956 @@ class GetGoogleChannelConfigRequest(proto.Message): ) +class GetMessageBusRequest(proto.Message): + r"""The request message for the GetMessageBus method. + + Attributes: + name (str): + Required. The name of the message bus to get. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ListMessageBusesRequest(proto.Message): + r"""The request message for the ListMessageBuses method. + + Attributes: + parent (str): + Required. The parent collection to list + message buses on. + page_size (int): + Optional. The maximum number of results to + return on each page. + Note: The service may send fewer. + page_token (str): + Optional. The page token; provide the value + from the `next_page_token` field in a previous + call to retrieve the subsequent page. + + When paginating, all other parameters provided + must match the previous call that provided the + page token. + order_by (str): + Optional. The sorting order of the resources + returned. Value should be a comma-separated list + of fields. The default sorting order is + ascending. To specify descending order for a + field, append a `desc` suffix; for example: + + `name desc, update_time`. + filter (str): + Optional. The filter field that the list + request will filter on. Possible filtersare + described in https://google.aip.dev/160. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + order_by: str = proto.Field( + proto.STRING, + number=4, + ) + filter: str = proto.Field( + proto.STRING, + number=5, + ) + + +class ListMessageBusesResponse(proto.Message): + r"""The response message for the `ListMessageBuses` method. + + Attributes: + message_buses (MutableSequence[google.cloud.eventarc_v1.types.MessageBus]): + The requested message buses, up to the number + specified in `page_size`. + next_page_token (str): + A page token that can be sent to + `ListMessageBuses` to request the next page. If + this is empty, then there are no more pages. + unreachable (MutableSequence[str]): + Unreachable resources, if any. 
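+
+    Example (editor's illustrative sketch, not generator output; the parent
+    value is a placeholder and ``list_message_buses`` refers to the client
+    method generated alongside these request/response types)::
+
+        from google.cloud import eventarc_v1
+
+        client = eventarc_v1.EventarcClient()
+        request = eventarc_v1.ListMessageBusesRequest(
+            parent="projects/my-project/locations/us-central1",  # placeholder
+            page_size=50,
+        )
+        # The returned pager follows next_page_token transparently.
+        for message_bus in client.list_message_buses(request=request):
+            print(message_bus.name)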
+ """ + + @property + def raw_page(self): + return self + + message_buses: MutableSequence[gce_message_bus.MessageBus] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=gce_message_bus.MessageBus, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + unreachable: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + +class ListMessageBusEnrollmentsRequest(proto.Message): + r"""The request message for the `ListMessageBusEnrollments` + method. + + Attributes: + parent (str): + Required. The parent message bus to list + enrollments on. + page_size (int): + Optional. The maximum number of results to + return on each page. + Note: The service may send fewer. + page_token (str): + Optional. The page token; provide the value + from the `next_page_token` field in a previous + call to retrieve the subsequent page. + + When paginating, all other parameters provided + must match the previous call that provided the + page token. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + + +class ListMessageBusEnrollmentsResponse(proto.Message): + r"""The response message for the `ListMessageBusEnrollments` + method.` + + Attributes: + enrollments (MutableSequence[str]): + The requested enrollments, up to the number + specified in `page_size`. + next_page_token (str): + A page token that can be sent to + `ListMessageBusEnrollments` to request the next + page. If this is empty, then there are no more + pages. + unreachable (MutableSequence[str]): + Unreachable resources, if any. + """ + + @property + def raw_page(self): + return self + + enrollments: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=1, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + unreachable: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + +class CreateMessageBusRequest(proto.Message): + r"""The request message for the CreateMessageBus method. + + Attributes: + parent (str): + Required. The parent collection in which to + add this message bus. + message_bus (google.cloud.eventarc_v1.types.MessageBus): + Required. The message bus to create. + message_bus_id (str): + Required. The user-provided ID to be assigned + to the MessageBus. It should match the format + `^[a-z]([a-z0-9-]{0,61}[a-z0-9])?$`. + validate_only (bool): + Optional. If set, validate the request and + preview the review, but do not post it. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + message_bus: gce_message_bus.MessageBus = proto.Field( + proto.MESSAGE, + number=2, + message=gce_message_bus.MessageBus, + ) + message_bus_id: str = proto.Field( + proto.STRING, + number=3, + ) + validate_only: bool = proto.Field( + proto.BOOL, + number=4, + ) + + +class UpdateMessageBusRequest(proto.Message): + r"""The request message for the UpdateMessageBus method. + + Attributes: + message_bus (google.cloud.eventarc_v1.types.MessageBus): + Required. The MessageBus to be updated. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Optional. The fields to be updated; only + fields explicitly provided are updated. If no + field mask is provided, all provided fields in + the request are updated. To update all fields, + provide a field mask of "*". + allow_missing (bool): + Optional. If set to true, and the MessageBus + is not found, a new MessageBus will be created. 
+ In this situation, `update_mask` is ignored. + validate_only (bool): + Optional. If set, validate the request and + preview the review, but do not post it. + """ + + message_bus: gce_message_bus.MessageBus = proto.Field( + proto.MESSAGE, + number=1, + message=gce_message_bus.MessageBus, + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=2, + message=field_mask_pb2.FieldMask, + ) + allow_missing: bool = proto.Field( + proto.BOOL, + number=3, + ) + validate_only: bool = proto.Field( + proto.BOOL, + number=4, + ) + + +class DeleteMessageBusRequest(proto.Message): + r"""The request message for the DeleteMessageBus method. + + Attributes: + name (str): + Required. The name of the MessageBus to be + deleted. + etag (str): + Optional. If provided, the MessageBus will + only be deleted if the etag matches the current + etag on the resource. + allow_missing (bool): + Optional. If set to true, and the MessageBus + is not found, the request will succeed but no + action will be taken on the server. + validate_only (bool): + Optional. If set, validate the request and + preview the review, but do not post it. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + etag: str = proto.Field( + proto.STRING, + number=2, + ) + allow_missing: bool = proto.Field( + proto.BOOL, + number=3, + ) + validate_only: bool = proto.Field( + proto.BOOL, + number=4, + ) + + +class GetEnrollmentRequest(proto.Message): + r"""The request message for the GetEnrollment method. + + Attributes: + name (str): + Required. The name of the Enrollment to get. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ListEnrollmentsRequest(proto.Message): + r"""The request message for the ListEnrollments method. + + Attributes: + parent (str): + Required. The parent collection to list + triggers on. + page_size (int): + Optional. The maximum number of results to + return on each page. + Note: The service may send fewer. + page_token (str): + Optional. The page token; provide the value + from the `next_page_token` field in a previous + call to retrieve the subsequent page. + + When paginating, all other parameters provided + must match the previous call that provided the + page token. + order_by (str): + Optional. The sorting order of the resources + returned. Value should be a comma-separated list + of fields. The default sorting order is + ascending. To specify descending order for a + field, append a `desc` suffix; for example: + + `name desc, update_time`. + filter (str): + Optional. The filter field that the list + request will filter on. Possible filtersare + described in https://google.aip.dev/160. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + order_by: str = proto.Field( + proto.STRING, + number=4, + ) + filter: str = proto.Field( + proto.STRING, + number=5, + ) + + +class ListEnrollmentsResponse(proto.Message): + r"""The response message for the `ListEnrollments` method. + + Attributes: + enrollments (MutableSequence[google.cloud.eventarc_v1.types.Enrollment]): + The requested Enrollments, up to the number + specified in `page_size`. + next_page_token (str): + A page token that can be sent to + `ListEnrollments` to request the next page. If + this is empty, then there are no more pages. + unreachable (MutableSequence[str]): + Unreachable resources, if any. 
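+
+    Example (editor's illustrative sketch, not generator output; the parent
+    and the AIP-160 filter string below are placeholders)::
+
+        from google.cloud import eventarc_v1
+
+        request = eventarc_v1.ListEnrollmentsRequest(
+            parent="projects/my-project/locations/us-central1",  # placeholder
+            order_by="name desc, update_time",
+            # Placeholder filter; see https://google.aip.dev/160 for syntax.
+            filter='display_name="my-enrollment"',
+        )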
+ """ + + @property + def raw_page(self): + return self + + enrollments: MutableSequence[gce_enrollment.Enrollment] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=gce_enrollment.Enrollment, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + unreachable: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + +class CreateEnrollmentRequest(proto.Message): + r"""The request message for the CreateEnrollment method. + + Attributes: + parent (str): + Required. The parent collection in which to + add this enrollment. + enrollment (google.cloud.eventarc_v1.types.Enrollment): + Required. The enrollment to create. + enrollment_id (str): + Required. The user-provided ID to be assigned + to the Enrollment. It should match the format + `^[a-z]([a-z0-9-]{0,61}[a-z0-9])?$`. + validate_only (bool): + Optional. If set, validate the request and + preview the review, but do not post it. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + enrollment: gce_enrollment.Enrollment = proto.Field( + proto.MESSAGE, + number=2, + message=gce_enrollment.Enrollment, + ) + enrollment_id: str = proto.Field( + proto.STRING, + number=3, + ) + validate_only: bool = proto.Field( + proto.BOOL, + number=4, + ) + + +class UpdateEnrollmentRequest(proto.Message): + r"""The request message for the UpdateEnrollment method. + + Attributes: + enrollment (google.cloud.eventarc_v1.types.Enrollment): + Required. The Enrollment to be updated. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Optional. The fields to be updated; only + fields explicitly provided are updated. If no + field mask is provided, all provided fields in + the request are updated. To update all fields, + provide a field mask of "*". + allow_missing (bool): + Optional. If set to true, and the Enrollment + is not found, a new Enrollment will be created. + In this situation, `update_mask` is ignored. + validate_only (bool): + Optional. If set, validate the request and + preview the review, but do not post it. + """ + + enrollment: gce_enrollment.Enrollment = proto.Field( + proto.MESSAGE, + number=1, + message=gce_enrollment.Enrollment, + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=2, + message=field_mask_pb2.FieldMask, + ) + allow_missing: bool = proto.Field( + proto.BOOL, + number=3, + ) + validate_only: bool = proto.Field( + proto.BOOL, + number=4, + ) + + +class DeleteEnrollmentRequest(proto.Message): + r"""The request message for the DeleteEnrollment method. + + Attributes: + name (str): + Required. The name of the Enrollment to be + deleted. + etag (str): + Optional. If provided, the Enrollment will + only be deleted if the etag matches the current + etag on the resource. + allow_missing (bool): + Optional. If set to true, and the Enrollment + is not found, the request will succeed but no + action will be taken on the server. + validate_only (bool): + Optional. If set, validate the request and + preview the review, but do not post it. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + etag: str = proto.Field( + proto.STRING, + number=2, + ) + allow_missing: bool = proto.Field( + proto.BOOL, + number=3, + ) + validate_only: bool = proto.Field( + proto.BOOL, + number=4, + ) + + +class GetPipelineRequest(proto.Message): + r"""The request message for the GetPipeline method. + + Attributes: + name (str): + Required. The name of the pipeline to get. 
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ListPipelinesRequest(proto.Message): + r"""The request message for the ListPipelines method. + + Attributes: + parent (str): + Required. The parent collection to list + pipelines on. + page_size (int): + Optional. The maximum number of results to + return on each page. + Note: The service may send fewer. + page_token (str): + Optional. The page token; provide the value + from the `next_page_token` field in a previous + call to retrieve the subsequent page. + + When paginating, all other parameters provided + must match the previous call that provided the + page token. + order_by (str): + Optional. The sorting order of the resources + returned. Value should be a comma-separated list + of fields. The default sorting order is + ascending. To specify descending order for a + field, append a `desc` suffix; for example: + + `name desc, update_time`. + filter (str): + Optional. The filter field that the list + request will filter on. Possible filters are + described in https://google.aip.dev/160. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + order_by: str = proto.Field( + proto.STRING, + number=4, + ) + filter: str = proto.Field( + proto.STRING, + number=5, + ) + + +class ListPipelinesResponse(proto.Message): + r"""The response message for the ListPipelines method. + + Attributes: + pipelines (MutableSequence[google.cloud.eventarc_v1.types.Pipeline]): + The requested pipelines, up to the number + specified in `page_size`. + next_page_token (str): + A page token that can be sent to + `ListPipelines` to request the next page. If + this is empty, then there are no more pages. + unreachable (MutableSequence[str]): + Unreachable resources, if any. + """ + + @property + def raw_page(self): + return self + + pipelines: MutableSequence[gce_pipeline.Pipeline] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=gce_pipeline.Pipeline, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + unreachable: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + +class CreatePipelineRequest(proto.Message): + r"""The request message for the CreatePipeline method. + + Attributes: + parent (str): + Required. The parent collection in which to + add this pipeline. + pipeline (google.cloud.eventarc_v1.types.Pipeline): + Required. The pipeline to create. + pipeline_id (str): + Required. The user-provided ID to be assigned + to the Pipeline. It should match the format + `^[a-z]([a-z0-9-]{0,61}[a-z0-9])?$`. + validate_only (bool): + Optional. If set, validate the request and + preview the review, but do not post it. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + pipeline: gce_pipeline.Pipeline = proto.Field( + proto.MESSAGE, + number=2, + message=gce_pipeline.Pipeline, + ) + pipeline_id: str = proto.Field( + proto.STRING, + number=3, + ) + validate_only: bool = proto.Field( + proto.BOOL, + number=4, + ) + + +class UpdatePipelineRequest(proto.Message): + r"""The request message for the UpdatePipeline method. + + Attributes: + pipeline (google.cloud.eventarc_v1.types.Pipeline): + Required. The Pipeline to be updated. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Optional. The fields to be updated; only + fields explicitly provided are updated. 
If no + field mask is provided, all provided fields in + the request are updated. To update all fields, + provide a field mask of "*". + allow_missing (bool): + Optional. If set to true, and the Pipeline is + not found, a new Pipeline will be created. In + this situation, `update_mask` is ignored. + validate_only (bool): + Optional. If set, validate the request and + preview the review, but do not post it. + """ + + pipeline: gce_pipeline.Pipeline = proto.Field( + proto.MESSAGE, + number=1, + message=gce_pipeline.Pipeline, + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=2, + message=field_mask_pb2.FieldMask, + ) + allow_missing: bool = proto.Field( + proto.BOOL, + number=3, + ) + validate_only: bool = proto.Field( + proto.BOOL, + number=4, + ) + + +class DeletePipelineRequest(proto.Message): + r"""The request message for the DeletePipeline method. + + Attributes: + name (str): + Required. The name of the Pipeline to be + deleted. + etag (str): + Optional. If provided, the Pipeline will only + be deleted if the etag matches the current etag + on the resource. + allow_missing (bool): + Optional. If set to true, and the Pipeline is + not found, the request will succeed but no + action will be taken on the server. + validate_only (bool): + Optional. If set, validate the request and + preview the review, but do not post it. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + etag: str = proto.Field( + proto.STRING, + number=2, + ) + allow_missing: bool = proto.Field( + proto.BOOL, + number=3, + ) + validate_only: bool = proto.Field( + proto.BOOL, + number=4, + ) + + +class GetGoogleApiSourceRequest(proto.Message): + r"""The request message for the GetGoogleApiSource method. + + Attributes: + name (str): + Required. The name of the google api source + to get. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ListGoogleApiSourcesRequest(proto.Message): + r"""The request message for the ListGoogleApiSources method. + + Attributes: + parent (str): + Required. The parent collection to list + GoogleApiSources on. + page_size (int): + Optional. The maximum number of results to + return on each page. + Note: The service may send fewer. + page_token (str): + Optional. The page token; provide the value + from the `next_page_token` field in a previous + call to retrieve the subsequent page. + + When paginating, all other parameters provided + must match the previous call that provided the + page token. + order_by (str): + Optional. The sorting order of the resources + returned. Value should be a comma-separated list + of fields. The default sorting order is + ascending. To specify descending order for a + field, append a `desc` suffix; for example: + + `name desc, update_time`. + filter (str): + Optional. The filter field that the list + request will filter on. Possible filtersare + described in https://google.aip.dev/160. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + order_by: str = proto.Field( + proto.STRING, + number=4, + ) + filter: str = proto.Field( + proto.STRING, + number=5, + ) + + +class ListGoogleApiSourcesResponse(proto.Message): + r"""The response message for the `ListGoogleApiSources` method. 
+ + Attributes: + google_api_sources (MutableSequence[google.cloud.eventarc_v1.types.GoogleApiSource]): + The requested GoogleApiSources, up to the + number specified in `page_size`. + next_page_token (str): + A page token that can be sent to + `ListMessageBusEnrollments` to request the next + page. If this is empty, then there are no more + pages. + unreachable (MutableSequence[str]): + Unreachable resources, if any. + """ + + @property + def raw_page(self): + return self + + google_api_sources: MutableSequence[gce_google_api_source.GoogleApiSource] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=gce_google_api_source.GoogleApiSource, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + unreachable: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + +class CreateGoogleApiSourceRequest(proto.Message): + r"""The request message for the CreateGoogleApiSource method. + + Attributes: + parent (str): + Required. The parent collection in which to + add this google api source. + google_api_source (google.cloud.eventarc_v1.types.GoogleApiSource): + Required. The google api source to create. + google_api_source_id (str): + Required. The user-provided ID to be assigned + to the GoogleApiSource. It should match the + format `^[a-z]([a-z0-9-]{0,61}[a-z0-9])?$`. + validate_only (bool): + Optional. If set, validate the request and + preview the review, but do not post it. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + google_api_source: gce_google_api_source.GoogleApiSource = proto.Field( + proto.MESSAGE, + number=2, + message=gce_google_api_source.GoogleApiSource, + ) + google_api_source_id: str = proto.Field( + proto.STRING, + number=3, + ) + validate_only: bool = proto.Field( + proto.BOOL, + number=4, + ) + + +class UpdateGoogleApiSourceRequest(proto.Message): + r"""The request message for the UpdateGoogleApiSource method. + + Attributes: + google_api_source (google.cloud.eventarc_v1.types.GoogleApiSource): + Required. The GoogleApiSource to be updated. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Optional. The fields to be updated; only + fields explicitly provided are updated. If no + field mask is provided, all provided fields in + the request are updated. To update all fields, + provide a field mask of "*". + allow_missing (bool): + Optional. If set to true, and the + GoogleApiSource is not found, a new + GoogleApiSource will be created. In this + situation, `update_mask` is ignored. + validate_only (bool): + Optional. If set, validate the request and + preview the review, but do not post it. + """ + + google_api_source: gce_google_api_source.GoogleApiSource = proto.Field( + proto.MESSAGE, + number=1, + message=gce_google_api_source.GoogleApiSource, + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=2, + message=field_mask_pb2.FieldMask, + ) + allow_missing: bool = proto.Field( + proto.BOOL, + number=3, + ) + validate_only: bool = proto.Field( + proto.BOOL, + number=4, + ) + + +class DeleteGoogleApiSourceRequest(proto.Message): + r"""The request message for the DeleteGoogleApiSource method. + + Attributes: + name (str): + Required. The name of the GoogleApiSource to + be deleted. + etag (str): + Optional. If provided, the MessageBus will + only be deleted if the etag matches the current + etag on the resource. + allow_missing (bool): + Optional. 
If set to true, and the MessageBus + is not found, the request will succeed but no + action will be taken on the server. + validate_only (bool): + Optional. If set, validate the request and + preview the review, but do not post it. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + etag: str = proto.Field( + proto.STRING, + number=2, + ) + allow_missing: bool = proto.Field( + proto.BOOL, + number=3, + ) + validate_only: bool = proto.Field( + proto.BOOL, + number=4, + ) + + class OperationMetadata(proto.Message): r"""Represents the metadata of the long-running operation. diff --git a/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/google_api_source.py b/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/google_api_source.py new file mode 100755 index 0000000000..8bd80b352f --- /dev/null +++ b/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/google_api_source.py @@ -0,0 +1,133 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.cloud.eventarc_v1.types import logging_config as gce_logging_config +from google.protobuf import timestamp_pb2 # type: ignore + + +__protobuf__ = proto.module( + package='google.cloud.eventarc.v1', + manifest={ + 'GoogleApiSource', + }, +) + + +class GoogleApiSource(proto.Message): + r"""A GoogleApiSource represents a subscription of 1P events from + a MessageBus. + + Attributes: + name (str): + Identifier. Resource name of the form + projects/{project}/locations/{location}/googleApiSources/{google_api_source} + uid (str): + Output only. Server assigned unique + identifier for the channel. The value is a UUID4 + string and guaranteed to remain unchanged until + the resource is deleted. + etag (str): + Output only. This checksum is computed by the + server based on the value of other fields, and + might be sent only on update and delete requests + to ensure that the client has an up-to-date + value before proceeding. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The creation time. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The last-modified time. + labels (MutableMapping[str, str]): + Optional. Resource labels. + annotations (MutableMapping[str, str]): + Optional. Resource annotations. + display_name (str): + Optional. Resource display name. + destination (str): + Required. Destination is the message bus that + the GoogleApiSource is delivering to. It must be + point to the full resource name of a MessageBus. + Format: + + "projects/{PROJECT_ID}/locations/{region}/messagesBuses/{MESSAGE_BUS_ID) + crypto_key_name (str): + Optional. Resource name of a KMS crypto key + (managed by the user) used to encrypt/decrypt + their event data. + + It must match the pattern + `projects/*/locations/*/keyRings/*/cryptoKeys/*`. 
+ logging_config (google.cloud.eventarc_v1.types.LoggingConfig): + Optional. Config to control Platform logging + for the GoogleApiSource. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + uid: str = proto.Field( + proto.STRING, + number=2, + ) + etag: str = proto.Field( + proto.STRING, + number=3, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=5, + message=timestamp_pb2.Timestamp, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=6, + ) + annotations: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=7, + ) + display_name: str = proto.Field( + proto.STRING, + number=8, + ) + destination: str = proto.Field( + proto.STRING, + number=9, + ) + crypto_key_name: str = proto.Field( + proto.STRING, + number=10, + ) + logging_config: gce_logging_config.LoggingConfig = proto.Field( + proto.MESSAGE, + number=11, + message=gce_logging_config.LoggingConfig, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/google_channel_config.py b/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/google_channel_config.py index 0f84298602..7655bffe98 100755 --- a/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/google_channel_config.py +++ b/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/google_channel_config.py @@ -42,6 +42,9 @@ class GoogleChannelConfig(proto.Message): Required. The resource name of the config. Must be in the format of, `projects/{project}/locations/{location}/googleChannelConfig`. + In API responses, the config name always + includes the projectID, regardless of whether + the projectID or projectNumber was provided. update_time (google.protobuf.timestamp_pb2.Timestamp): Output only. The last-modified time. crypto_key_name (str): @@ -51,6 +54,8 @@ class GoogleChannelConfig(proto.Message): It must match the pattern `projects/*/locations/*/keyRings/*/cryptoKeys/*`. + labels (MutableMapping[str, str]): + Optional. Resource labels. """ name: str = proto.Field( @@ -66,6 +71,11 @@ class GoogleChannelConfig(proto.Message): proto.STRING, number=7, ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=8, + ) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/logging_config.py b/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/logging_config.py new file mode 100755 index 0000000000..6b74ecc7f8 --- /dev/null +++ b/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/logging_config.py @@ -0,0 +1,96 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + + +__protobuf__ = proto.module( + package='google.cloud.eventarc.v1', + manifest={ + 'LoggingConfig', + }, +) + + +class LoggingConfig(proto.Message): + r"""The configuration for Platform Telemetry logging for Eventarc + Advanced resources. + + Attributes: + log_severity (google.cloud.eventarc_v1.types.LoggingConfig.LogSeverity): + Optional. The minimum severity of logs that + will be sent to Stackdriver/Platform Telemetry. + Logs at severitiy ≥ this value will be sent, + unless it is NONE. + """ + class LogSeverity(proto.Enum): + r"""The different severities for logging supported by Eventarc + Advanced resources. + This enum is an exhaustive list of log severities and is FROZEN. + Do not expect new values to be added. + + Values: + LOG_SEVERITY_UNSPECIFIED (0): + Log severity is not specified. This value is + treated the same as NONE, but is used to + distinguish between no update and update to NONE + in update_masks. + NONE (1): + Default value at resource creation, presence + of this value must be treated as no + logging/disable logging. + DEBUG (2): + Debug or trace level logging. + INFO (3): + Routine information, such as ongoing status + or performance. + NOTICE (4): + Normal but significant events, such as start + up, shut down, or a configuration change. + WARNING (5): + Warning events might cause problems. + ERROR (6): + Error events are likely to cause problems. + CRITICAL (7): + Critical events cause more severe problems or + outages. + ALERT (8): + A person must take action immediately. + EMERGENCY (9): + One or more systems are unusable. + """ + LOG_SEVERITY_UNSPECIFIED = 0 + NONE = 1 + DEBUG = 2 + INFO = 3 + NOTICE = 4 + WARNING = 5 + ERROR = 6 + CRITICAL = 7 + ALERT = 8 + EMERGENCY = 9 + + log_severity: LogSeverity = proto.Field( + proto.ENUM, + number=1, + enum=LogSeverity, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/message_bus.py b/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/message_bus.py new file mode 100755 index 0000000000..6362cfd629 --- /dev/null +++ b/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/message_bus.py @@ -0,0 +1,127 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.cloud.eventarc_v1.types import logging_config as gce_logging_config +from google.protobuf import timestamp_pb2 # type: ignore + + +__protobuf__ = proto.module( + package='google.cloud.eventarc.v1', + manifest={ + 'MessageBus', + }, +) + + +class MessageBus(proto.Message): + r"""MessageBus for the messages flowing through the system. 
The + admin has visibility and control over the messages being + published and consumed and can restrict publishers and + subscribers to only a subset of data available in the system by + defining authorization policies. + + Attributes: + name (str): + Identifier. Resource name of the form + projects/{project}/locations/{location}/messageBuses/{message_bus} + uid (str): + Output only. Server assigned unique + identifier for the channel. The value is a UUID4 + string and guaranteed to remain unchanged until + the resource is deleted. + etag (str): + Output only. This checksum is computed by the + server based on the value of other fields, and + might be sent only on update and delete requests + to ensure that the client has an up-to-date + value before proceeding. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The creation time. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The last-modified time. + labels (MutableMapping[str, str]): + Optional. Resource labels. + annotations (MutableMapping[str, str]): + Optional. Resource annotations. + display_name (str): + Optional. Resource display name. + crypto_key_name (str): + Optional. Resource name of a KMS crypto key + (managed by the user) used to encrypt/decrypt + their event data. + + It must match the pattern + `projects/*/locations/*/keyRings/*/cryptoKeys/*`. + logging_config (google.cloud.eventarc_v1.types.LoggingConfig): + Optional. Config to control Platform logging + for the Message Bus. This log configuration is + applied to the Message Bus itself, and all the + Enrollments attached to it. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + uid: str = proto.Field( + proto.STRING, + number=2, + ) + etag: str = proto.Field( + proto.STRING, + number=3, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=5, + message=timestamp_pb2.Timestamp, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=6, + ) + annotations: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=7, + ) + display_name: str = proto.Field( + proto.STRING, + number=8, + ) + crypto_key_name: str = proto.Field( + proto.STRING, + number=10, + ) + logging_config: gce_logging_config.LoggingConfig = proto.Field( + proto.MESSAGE, + number=11, + message=gce_logging_config.LoggingConfig, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/network_config.py b/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/network_config.py new file mode 100755 index 0000000000..591bd7295f --- /dev/null +++ b/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/network_config.py @@ -0,0 +1,48 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + + +__protobuf__ = proto.module( + package='google.cloud.eventarc.v1', + manifest={ + 'NetworkConfig', + }, +) + + +class NetworkConfig(proto.Message): + r"""Network Configuration that can be inherited by other protos. + + Attributes: + network_attachment (str): + Required. Name of the NetworkAttachment that + allows access to the customer's VPC. Format: + + `projects/{PROJECT_ID}/regions/{REGION}/networkAttachments/{NETWORK_ATTACHMENT_NAME}` + """ + + network_attachment: str = proto.Field( + proto.STRING, + number=1, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/pipeline.py b/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/pipeline.py new file mode 100755 index 0000000000..fa8c8d8a0c --- /dev/null +++ b/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/pipeline.py @@ -0,0 +1,948 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.cloud.eventarc_v1.types import logging_config as gce_logging_config +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + + +__protobuf__ = proto.module( + package='google.cloud.eventarc.v1', + manifest={ + 'Pipeline', + }, +) + + +class Pipeline(proto.Message): + r"""A representation of the Pipeline resource. + + Attributes: + name (str): + Identifier. The resource name of the + Pipeline. Must be unique within the location of + the project and must be in + `projects/{project}/locations/{location}/pipelines/{pipeline}` + format. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The creation time. + A timestamp in RFC3339 UTC "Zulu" format, with + nanosecond resolution and up to nine fractional + digits. Examples: "2014-10-02T15:01:23Z" and + "2014-10-02T15:01:23.045123456Z". + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The last-modified time. + A timestamp in RFC3339 UTC "Zulu" format, with + nanosecond resolution and up to nine fractional + digits. Examples: "2014-10-02T15:01:23Z" and + "2014-10-02T15:01:23.045123456Z". + labels (MutableMapping[str, str]): + Optional. User labels attached to the + Pipeline that can be used to group resources. An + object containing a list of "key": value pairs. + Example: { "name": "wrench", "mass": "1.3kg", + "count": "3" }. + uid (str): + Output only. Server-assigned unique + identifier for the Pipeline. The value is a + UUID4 string and guaranteed to remain unchanged + until the resource is deleted. + annotations (MutableMapping[str, str]): + Optional. User-defined annotations. See + https://google.aip.dev/128#annotations. + display_name (str): + Optional. Display name of resource. 
+ destinations (MutableSequence[google.cloud.eventarc_v1.types.Pipeline.Destination]): + Required. List of destinations to which + messages will be forwarded. Currently, exactly + one destination is supported per Pipeline. + mediations (MutableSequence[google.cloud.eventarc_v1.types.Pipeline.Mediation]): + Optional. List of mediation operations to be + performed on the message. Currently, only one + Transformation operation is allowed in each + Pipeline. + crypto_key_name (str): + Optional. Resource name of a KMS crypto key + (managed by the user) used to encrypt/decrypt + the event data. If not set, an internal + Google-owned key will be used to encrypt + messages. It must match the pattern + "projects/{project}/locations/{location}/keyRings/{keyring}/cryptoKeys/{key}". + input_payload_format (google.cloud.eventarc_v1.types.Pipeline.MessagePayloadFormat): + Optional. The payload format expected for the + messages received by the Pipeline. If + input_payload_format is set then any messages + not matching this format will be treated as + persistent errors. If input_payload_format is + not set, then the message data will be treated + as an opaque binary and no output format can be + set on the Pipeline through the + Pipeline.Destination.output_payload_format + field. Any Mediations on the Pipeline that + involve access to the data field will fail as + persistent errors. + logging_config (google.cloud.eventarc_v1.types.LoggingConfig): + Optional. Config to control Platform Logging + for Pipelines. + retry_policy (google.cloud.eventarc_v1.types.Pipeline.RetryPolicy): + Optional. The retry policy to use in the + pipeline. + etag (str): + Output only. This checksum is computed by the + server based on the value of other fields, and + might be sent only on create requests to ensure + that the client has an up-to-date value before + proceeding. + satisfies_pzs (bool): + Output only. Whether or not this Pipeline + satisfies the requirements of physical zone + separation + """ + + class MessagePayloadFormat(proto.Message): + r"""Represents the format of message data. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + protobuf (google.cloud.eventarc_v1.types.Pipeline.MessagePayloadFormat.ProtobufFormat): + Optional. Protobuf format. + + This field is a member of `oneof`_ ``kind``. + avro (google.cloud.eventarc_v1.types.Pipeline.MessagePayloadFormat.AvroFormat): + Optional. Avro format. + + This field is a member of `oneof`_ ``kind``. + json (google.cloud.eventarc_v1.types.Pipeline.MessagePayloadFormat.JsonFormat): + Optional. JSON format. + + This field is a member of `oneof`_ ``kind``. + """ + + class JsonFormat(proto.Message): + r"""The format of a JSON message payload. + """ + + class ProtobufFormat(proto.Message): + r"""The format of a Protobuf message payload. + + Attributes: + schema_definition (str): + Optional. The entire schema definition is + stored in this field. + """ + + schema_definition: str = proto.Field( + proto.STRING, + number=1, + ) + + class AvroFormat(proto.Message): + r"""The format of an AVRO message payload. + + Attributes: + schema_definition (str): + Optional. The entire schema definition is + stored in this field. 
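+
+        Example (editor's illustrative sketch, not generator output; the
+        Avro schema below is a placeholder)::
+
+            from google.cloud import eventarc_v1
+
+            payload_format = eventarc_v1.Pipeline.MessagePayloadFormat(
+                avro=eventarc_v1.Pipeline.MessagePayloadFormat.AvroFormat(
+                    # Placeholder schema definition.
+                    schema_definition='{"type": "record", "name": "Event", "fields": []}',
+                ),
+            )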
+ """ + + schema_definition: str = proto.Field( + proto.STRING, + number=1, + ) + + protobuf: 'Pipeline.MessagePayloadFormat.ProtobufFormat' = proto.Field( + proto.MESSAGE, + number=1, + oneof='kind', + message='Pipeline.MessagePayloadFormat.ProtobufFormat', + ) + avro: 'Pipeline.MessagePayloadFormat.AvroFormat' = proto.Field( + proto.MESSAGE, + number=2, + oneof='kind', + message='Pipeline.MessagePayloadFormat.AvroFormat', + ) + json: 'Pipeline.MessagePayloadFormat.JsonFormat' = proto.Field( + proto.MESSAGE, + number=3, + oneof='kind', + message='Pipeline.MessagePayloadFormat.JsonFormat', + ) + + class Destination(proto.Message): + r"""Represents a target of an invocation over HTTP. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + network_config (google.cloud.eventarc_v1.types.Pipeline.Destination.NetworkConfig): + Optional. Network config is used to configure + how Pipeline resolves and connects to a + destination. + http_endpoint (google.cloud.eventarc_v1.types.Pipeline.Destination.HttpEndpoint): + Optional. An HTTP endpoint destination + described by an URI. If a DNS FQDN is provided + as the endpoint, Pipeline will create a peering + zone to the consumer VPC and forward DNS + requests to the VPC specified by network config + to resolve the service endpoint. See: + + https://cloud.google.com/dns/docs/zones/zones-overview#peering_zones + + This field is a member of `oneof`_ ``destination_descriptor``. + workflow (str): + Optional. The resource name of the Workflow + whose Executions are triggered by the events. + The Workflow resource should be deployed in the + same project as the Pipeline. Format: + + `projects/{project}/locations/{location}/workflows/{workflow}` + + This field is a member of `oneof`_ ``destination_descriptor``. + message_bus (str): + Optional. The resource name of the Message + Bus to which events should be published. The + Message Bus resource should exist in the same + project as the Pipeline. Format: + + `projects/{project}/locations/{location}/messageBuses/{message_bus}` + + This field is a member of `oneof`_ ``destination_descriptor``. + topic (str): + Optional. The resource name of the Pub/Sub + topic to which events should be published. + Format: + + `projects/{project}/locations/{location}/topics/{topic}` + + This field is a member of `oneof`_ ``destination_descriptor``. + authentication_config (google.cloud.eventarc_v1.types.Pipeline.Destination.AuthenticationConfig): + Optional. An authentication config used to + authenticate message requests, such that + destinations can verify the source. For example, + this can be used with private Google Cloud + destinations that require Google Cloud + credentials for access like Cloud Run. This + field is optional and should be set only by + users interested in authenticated push. + output_payload_format (google.cloud.eventarc_v1.types.Pipeline.MessagePayloadFormat): + Optional. The message format before it is + delivered to the destination. If not set, the + message will be delivered in the format it was + originally delivered to the Pipeline. This field + can only be set if Pipeline.input_payload_format + is also set. 
+ """ + + class NetworkConfig(proto.Message): + r"""Represents a network config to be used for destination + resolution and connectivity. + + Attributes: + network_attachment (str): + Required. Name of the NetworkAttachment that + allows access to the consumer VPC. Format: + + `projects/{PROJECT_ID}/regions/{REGION}/networkAttachments/{NETWORK_ATTACHMENT_NAME}` + """ + + network_attachment: str = proto.Field( + proto.STRING, + number=1, + ) + + class HttpEndpoint(proto.Message): + r"""Represents a HTTP endpoint destination. + + Attributes: + uri (str): + Required. The URI of the HTTP endpoint. + + The value must be a RFC2396 URI string. + Examples: + `https://svc.us-central1.p.local:8080/route`. + Only the HTTPS protocol is supported. + message_binding_template (str): + Optional. The CEL expression used to modify + how the destination-bound HTTP request is + constructed. + + If a binding expression is not specified here, + the message is treated as a CloudEvent and is + mapped to the HTTP request according to the + CloudEvent HTTP Protocol Binding Binary Content + Mode + (https://github.com/cloudevents/spec/blob/main/cloudevents/bindings/http-protocol-binding.md#31-binary-content-mode). + In this representation, all fields except the + `data` and `datacontenttype` field on the + message are mapped to HTTP request headers with + a prefix of `ce-`. + + To construct the HTTP request payload and the + value of the content-type HTTP header, the + payload format is defined as follows: + + 1) Use the output_payload_format_type on the + Pipeline.Destination if it is set, else: + + 2) Use the input_payload_format_type on the + Pipeline if it is set, else: + + 3) Treat the payload as opaque binary data. + + The `data` field of the message is converted to + the payload format or left as-is for case 3) and + then attached as the payload of the HTTP + request. The `content-type` header on the HTTP + request is set to the payload format type or + left empty for case 3). However, if a mediation + has updated the `datacontenttype` field on the + message so that it is not the same as the + payload format type but it is still a prefix of + the payload format type, then the `content-type` + header on the HTTP request is set to this + `datacontenttype` value. For example, if the + `datacontenttype` is "application/json" and the + payload format type is "application/json; + charset=utf-8", then the `content-type` header + on the HTTP request is set to "application/json; + charset=utf-8". + + If a non-empty binding expression is specified + then this expression is used to modify the + default CloudEvent HTTP Protocol Binding Binary + Content representation. + The result of the CEL expression must be a map + of key/value pairs which is used as follows: + + - If a map named `headers` exists on the result + of the expression, then its key/value pairs + are directly mapped to the HTTP request + headers. The headers values are constructed + from the corresponding value type's canonical + representation. If the `headers` field doesn't + exist then the resulting HTTP request will be + the headers of the CloudEvent HTTP Binding + Binary Content Mode representation of the + final message. Note: If the specified binding + expression, has updated the `datacontenttype` + field on the message so that it is not the + same as the payload format type but it is + still a prefix of the payload format type, + then the `content-type` header in the + `headers` map is set to this `datacontenttype` + value. 
+ - If a field named `body` exists on the result + of the expression then its value is directly + mapped to the body of the request. If the + value of the `body` field is of type bytes or + string then it is used for the HTTP request + body as-is, with no conversion. If the body + field is of any other type then it is + converted to a JSON string. If the body field + does not exist then the resulting payload of + the HTTP request will be data value of the + CloudEvent HTTP Binding Binary Content Mode + representation of the final message as + described earlier. + - Any other fields in the resulting expression + will be ignored. + + The CEL expression may access the incoming + CloudEvent message in its definition, as + follows: + + - The `data` field of the incoming CloudEvent + message can be accessed using the + `message.data` value. Subfields of + `message.data` may also be accessed if an + input_payload_format has been specified on the + Pipeline. + - Each attribute of the incoming CloudEvent + message can be accessed using the + `message.` value, where is replaced + with the name of the attribute. + - Existing headers can be accessed in the CEL + expression using the `headers` variable. The + `headers` variable defines a map of key/value + pairs corresponding to the HTTP headers of the + CloudEvent HTTP Binding Binary Content Mode + representation of the final message as + described earlier. For example, the following + CEL expression can be used to construct an + HTTP request by adding an additional header to + the HTTP headers of the CloudEvent HTTP + Binding Binary Content Mode representation of + the final message and by overwriting the body + of the request: + + ``` + { + "headers": headers.merge({"new-header-key": + "new-header-value"}), "body": "new-body" + } + ``` + - The default binding for the message payload + can be accessed using the `body` variable. It + conatins a string representation of the + message payload in the format specified by the + `output_payload_format` field. If the + `input_payload_format` field is not set, the + `body` variable contains the same message + payload bytes that were published. + + Additionally, the following CEL extension + functions are provided for use in this CEL + expression: + + - toBase64Url: + + map.toBase64Url() -> string + - Converts a CelValue to a base64url encoded + string + - toJsonString: map.toJsonString() -> string + - Converts a CelValue to a JSON string + - merge: + + map1.merge(map2) -> map3 + - Merges the passed CEL map with the + existing CEL map the function is applied + to. + - If the same key exists in both maps, if + the key's value is type map both maps are + merged else the value from the passed map is + used. + - denormalize: + + map.denormalize() -> map + - Denormalizes a CEL map such that every + value of type map or key in the map is + expanded to return a single level map. + - The resulting keys are "." separated + indices of the map keys. + - For example: + + { + "a": 1, + "b": { + "c": 2, + "d": 3 + } + "e": [4, 5] + } + .denormalize() + -> { + "a": 1, + "b.c": 2, + "b.d": 3, + "e.0": 4, + "e.1": 5 + } + - setField: + + map.setField(key, value) -> message + - Sets the field of the message with the + given key to the given value. + - If the field is not present it will be + added. + - If the field is present it will be + overwritten. + - The key can be a dot separated path to set + a field in a nested message. + - Key must be of type string. + - Value may be any valid type. 
+ - removeFields: + + map.removeFields([key1, key2, ...]) -> message + - Removes the fields of the map with the + given keys. + - The keys can be a dot separated path to + remove a field in a nested message. + - If a key is not found it will be ignored. + - Keys must be of type string. + - toMap: + + [map1, map2, ...].toMap() -> map + - Converts a CEL list of CEL maps to a + single CEL map + - toCloudEventJsonWithPayloadFormat: + + message.toCloudEventJsonWithPayloadFormat() -> + map + - Converts a message to the corresponding + structure of JSON format for CloudEvents. + - It converts `data` to destination payload + format specified in + `output_payload_format`. If + `output_payload_format` is not set, the + data will remain unchanged. + - It also sets the corresponding + datacontenttype of the CloudEvent, as + indicated by + `output_payload_format`. If no + `output_payload_format` is set it will use + the value of the "datacontenttype" attribute + on the CloudEvent if present, else remove + "datacontenttype" attribute. + - This function expects that the content of + the message will adhere to the standard + CloudEvent format. If it doesn't then this + function will fail. + - The result is a CEL map that corresponds + to the JSON representation of the + CloudEvent. To convert that data to a JSON + string it can be chained with the toJsonString + function. + + The Pipeline expects that the message it + receives adheres to the standard CloudEvent + format. If it doesn't then the outgoing message + request may fail with a persistent error. + """ + + uri: str = proto.Field( + proto.STRING, + number=1, + ) + message_binding_template: str = proto.Field( + proto.STRING, + number=3, + ) + + class AuthenticationConfig(proto.Message): + r"""Represents a config used to authenticate message requests. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + google_oidc (google.cloud.eventarc_v1.types.Pipeline.Destination.AuthenticationConfig.OidcToken): + Optional. This authentication method will apply + Google OIDC tokens signed by a Google Cloud + service account to the requests. + + This field is a member of `oneof`_ ``authentication_method_descriptor``. + oauth_token (google.cloud.eventarc_v1.types.Pipeline.Destination.AuthenticationConfig.OAuthToken): + Optional. If specified, an [OAuth + token](https://developers.google.com/identity/protocols/OAuth2) + will be generated and attached as an + `Authorization` header in the HTTP request. + + This type of authorization should generally only + be used when calling Google APIs hosted on + *.googleapis.com. + + This field is a member of `oneof`_ ``authentication_method_descriptor``. + """ + + class OidcToken(proto.Message): + r"""Represents a config used to authenticate with a Google OIDC + token using a Google Cloud service account. Use this + authentication method to invoke your Cloud Run and Cloud + Functions destinations or HTTP endpoints that support Google + OIDC. + + Attributes: + service_account (str): + Required. Service account email used to + generate the OIDC Token. The principal who calls + this API must have iam.serviceAccounts.actAs + permission in the service account.
See + https://cloud.google.com/iam/docs/understanding-service-accounts + for more information. Eventarc service agents + must have + roles/iam.serviceAccountTokenCreator role + to allow the Pipeline to create OpenID tokens + for authenticated requests. + audience (str): + Optional. Audience to be used to generate the + OIDC Token. The audience claim identifies the + recipient that the JWT is intended for. If + unspecified, the destination URI will be used. + """ + + service_account: str = proto.Field( + proto.STRING, + number=1, + ) + audience: str = proto.Field( + proto.STRING, + number=2, + ) + + class OAuthToken(proto.Message): + r"""Contains information needed for generating an + [OAuth + token](https://developers.google.com/identity/protocols/OAuth2). + This type of authorization should generally only be used when + calling Google APIs hosted on *.googleapis.com. + + Attributes: + service_account (str): + Required. Service account email used to + generate the [OAuth + token](https://developers.google.com/identity/protocols/OAuth2). + The principal who calls this API must have + iam.serviceAccounts.actAs permission in the + service account. See + https://cloud.google.com/iam/docs/understanding-service-accounts + for more information. Eventarc service agents + must have + roles/iam.serviceAccountTokenCreator role + to allow Pipeline to create OAuth2 tokens for + authenticated requests. + scope (str): + Optional. OAuth scope to be used for + generating OAuth access token. If not specified, + "https://www.googleapis.com/auth/cloud-platform" + will be used. + """ + + service_account: str = proto.Field( + proto.STRING, + number=1, + ) + scope: str = proto.Field( + proto.STRING, + number=2, + ) + + google_oidc: 'Pipeline.Destination.AuthenticationConfig.OidcToken' = proto.Field( + proto.MESSAGE, + number=1, + oneof='authentication_method_descriptor', + message='Pipeline.Destination.AuthenticationConfig.OidcToken', + ) + oauth_token: 'Pipeline.Destination.AuthenticationConfig.OAuthToken' = proto.Field( + proto.MESSAGE, + number=2, + oneof='authentication_method_descriptor', + message='Pipeline.Destination.AuthenticationConfig.OAuthToken', + ) + + network_config: 'Pipeline.Destination.NetworkConfig' = proto.Field( + proto.MESSAGE, + number=1, + message='Pipeline.Destination.NetworkConfig', + ) + http_endpoint: 'Pipeline.Destination.HttpEndpoint' = proto.Field( + proto.MESSAGE, + number=2, + oneof='destination_descriptor', + message='Pipeline.Destination.HttpEndpoint', + ) + workflow: str = proto.Field( + proto.STRING, + number=3, + oneof='destination_descriptor', + ) + message_bus: str = proto.Field( + proto.STRING, + number=4, + oneof='destination_descriptor', + ) + topic: str = proto.Field( + proto.STRING, + number=8, + oneof='destination_descriptor', + ) + authentication_config: 'Pipeline.Destination.AuthenticationConfig' = proto.Field( + proto.MESSAGE, + number=5, + message='Pipeline.Destination.AuthenticationConfig', + ) + output_payload_format: 'Pipeline.MessagePayloadFormat' = proto.Field( + proto.MESSAGE, + number=6, + message='Pipeline.MessagePayloadFormat', + ) + + class Mediation(proto.Message): + r"""Mediation defines different ways to modify the Pipeline. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + transformation (google.cloud.eventarc_v1.types.Pipeline.Mediation.Transformation): + Optional.
How the Pipeline is to transform + messages + + This field is a member of `oneof`_ ``mediation_descriptor``. + """ + + class Transformation(proto.Message): + r"""Transformation defines the way to transform an incoming + message. + + Attributes: + transformation_template (str): + Optional. The CEL expression template to + apply to transform messages. The following CEL + extension functions are provided for use in this + CEL expression: + + - merge: + + map1.merge(map2) -> map3 + - Merges the passed CEL map with the + existing CEL map the function is applied + to. + - If the same key exists in both maps, if + the key's value is type map both maps are + merged else the value from the passed map is + used. + - denormalize: + + map.denormalize() -> map + - Denormalizes a CEL map such that every + value of type map or key in the map is + expanded to return a single level map. + - The resulting keys are "." separated + indices of the map keys. + - For example: + + { + "a": 1, + "b": { + "c": 2, + "d": 3 + } + "e": [4, 5] + } + .denormalize() + -> { + "a": 1, + "b.c": 2, + "b.d": 3, + "e.0": 4, + "e.1": 5 + } + - setField: + + map.setField(key, value) -> message + - Sets the field of the message with the + given key to the given value. + - If the field is not present it will be + added. + - If the field is present it will be + overwritten. + - The key can be a dot separated path to set + a field in a nested message. + - Key must be of type string. + - Value may be any valid type. + - removeFields: + + map.removeFields([key1, key2, ...]) -> message + - Removes the fields of the map with the + given keys. + - The keys can be a dot separated path to + remove a field in a nested message. + - If a key is not found it will be ignored. + - Keys must be of type string. + - toMap: + + [map1, map2, ...].toMap() -> map + - Converts a CEL list of CEL maps to a + single CEL map + - toDestinationPayloadFormat(): + + message.data.toDestinationPayloadFormat() -> + string or bytes + - Converts the message data to the + destination payload format specified in + Pipeline.Destination.output_payload_format + - This function is meant to be applied to + the message.data field. + - If the destination payload format is not + set, the function will return the message + data unchanged. + - toCloudEventJsonWithPayloadFormat: + + message.toCloudEventJsonWithPayloadFormat() -> + map + - Converts a message to the corresponding + structure of JSON format for CloudEvents + - This function applies + toDestinationPayloadFormat() to the + message data. It also sets the corresponding + datacontenttype of the CloudEvent, as + indicated by + Pipeline.Destination.output_payload_format. + If no output_payload_format is set it will + use the existing datacontenttype on the + CloudEvent if present, else leave + datacontenttype absent. + - This function expects that the content of + the message will adhere to the standard + CloudEvent format. If it doesn't then this + function will fail. + - The result is a CEL map that corresponds + to the JSON representation of the + CloudEvent. To convert that data to a JSON + string it can be chained with the toJsonString + function. + """ + + transformation_template: str = proto.Field( + proto.STRING, + number=1, + ) + + transformation: 'Pipeline.Mediation.Transformation' = proto.Field( + proto.MESSAGE, + number=1, + oneof='mediation_descriptor', + message='Pipeline.Mediation.Transformation', + ) + + class RetryPolicy(proto.Message): + r"""The retry policy configuration for the Pipeline. 
The pipeline + exponentially backs off in case the destination is non + responsive or returns a retryable error code. The default + semantics are as follows: + + The backoff starts with a 5 second delay and doubles the delay + after each failed attempt (10 seconds, 20 seconds, 40 seconds, + etc.). The delay is capped at 60 seconds by default. + Please note that if you set the min_retry_delay and + max_retry_delay fields to the same value this will make the + duration between retries constant. + + Attributes: + max_attempts (int): + Optional. The maximum number of delivery + attempts for any message. The value must be + between 1 and 100. The default value for this + field is 5. + min_retry_delay (google.protobuf.duration_pb2.Duration): + Optional. The minimum amount of seconds to + wait between retry attempts. The value must be + between 1 and 600. The default value for this + field is 5. + max_retry_delay (google.protobuf.duration_pb2.Duration): + Optional. The maximum amount of seconds to + wait between retry attempts. The value must be + between 1 and 600. The default value for this + field is 60. + """ + + max_attempts: int = proto.Field( + proto.INT32, + number=1, + ) + min_retry_delay: duration_pb2.Duration = proto.Field( + proto.MESSAGE, + number=2, + message=duration_pb2.Duration, + ) + max_retry_delay: duration_pb2.Duration = proto.Field( + proto.MESSAGE, + number=3, + message=duration_pb2.Duration, + ) + + name: str = proto.Field( + proto.STRING, + number=1, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=4, + ) + uid: str = proto.Field( + proto.STRING, + number=5, + ) + annotations: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=6, + ) + display_name: str = proto.Field( + proto.STRING, + number=7, + ) + destinations: MutableSequence[Destination] = proto.RepeatedField( + proto.MESSAGE, + number=8, + message=Destination, + ) + mediations: MutableSequence[Mediation] = proto.RepeatedField( + proto.MESSAGE, + number=9, + message=Mediation, + ) + crypto_key_name: str = proto.Field( + proto.STRING, + number=10, + ) + input_payload_format: MessagePayloadFormat = proto.Field( + proto.MESSAGE, + number=11, + message=MessagePayloadFormat, + ) + logging_config: gce_logging_config.LoggingConfig = proto.Field( + proto.MESSAGE, + number=12, + message=gce_logging_config.LoggingConfig, + ) + retry_policy: RetryPolicy = proto.Field( + proto.MESSAGE, + number=13, + message=RetryPolicy, + ) + etag: str = proto.Field( + proto.STRING, + number=99, + ) + satisfies_pzs: bool = proto.Field( + proto.BOOL, + number=14, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/trigger.py b/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/trigger.py index 57448bc893..35931d74ee 100755 --- a/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/trigger.py +++ b/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/trigger.py @@ -19,6 +19,7 @@ import proto # type: ignore +from google.cloud.eventarc_v1.types import network_config as gce_network_config from google.protobuf import timestamp_pb2 # type: ignore from google.rpc import code_pb2 # type: ignore @@ -34,6 +35,7 
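The destination, authentication, and retry options documented above come together when a Pipeline is built. The following is a minimal illustrative sketch rather than generated sample code: the project, network attachment, URI, and service account values are placeholders, and it assumes the nested types are reachable as attributes of eventarc_v1.Pipeline, as in the generated create_pipeline samples further below.

from google.cloud import eventarc_v1
from google.protobuf import duration_pb2

# Destination: an HTTPS endpoint reached through a network attachment and
# authenticated with a Google OIDC token minted for a service account.
destination = eventarc_v1.Pipeline.Destination(
    http_endpoint=eventarc_v1.Pipeline.Destination.HttpEndpoint(
        uri="https://svc.us-central1.p.local:8080/route",  # placeholder URI
    ),
    network_config=eventarc_v1.Pipeline.Destination.NetworkConfig(
        # placeholder network attachment name
        network_attachment="projects/my-project/regions/us-central1/networkAttachments/my-attachment",
    ),
    authentication_config=eventarc_v1.Pipeline.Destination.AuthenticationConfig(
        google_oidc=eventarc_v1.Pipeline.Destination.AuthenticationConfig.OidcToken(
            service_account="pipeline-sa@my-project.iam.gserviceaccount.com",  # placeholder
        ),
    ),
)

# Retry policy with the documented defaults written out explicitly:
# at most 5 attempts, exponential backoff starting at 5s and capped at 60s.
retry_policy = eventarc_v1.Pipeline.RetryPolicy(
    max_attempts=5,
    min_retry_delay=duration_pb2.Duration(seconds=5),
    max_retry_delay=duration_pb2.Duration(seconds=60),
)

pipeline = eventarc_v1.Pipeline(
    destinations=[destination],
    retry_policy=retry_policy,
)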
@@ 'CloudRun', 'GKE', 'Pubsub', + 'HttpEndpoint', }, ) @@ -67,26 +69,19 @@ class Trigger(proto.Message): associated with the trigger. The service account represents the identity of the trigger. - The principal who calls this API must have the - `iam.serviceAccounts.actAs` permission in the - service account. See - https://cloud.google.com/iam/docs/understanding-service-accounts?hl=en#sa_common - for more information. - - For Cloud Run destinations, this service account - is used to generate identity tokens when - invoking the service. See - https://cloud.google.com/run/docs/triggering/pubsub-push#create-service-account - for information on how to invoke authenticated - Cloud Run services. To create Audit Log - triggers, the service account should also have - the `roles/eventarc.eventReceiver` IAM role. + The `iam.serviceAccounts.actAs` permission must + be granted on the service account to allow a + principal to impersonate the service account. + For more information, see the + [Roles and + permissions](/eventarc/docs/all-roles-permissions) + page specific to the trigger destination. destination (google.cloud.eventarc_v1.types.Destination): Required. Destination specifies where the events should be sent to. transport (google.cloud.eventarc_v1.types.Transport): Optional. To deliver messages, Eventarc might - use other GCP products as a transport + use other Google Cloud products as a transport intermediary. This field contains a reference to that transport intermediary. This information can be used for debugging purposes. @@ -102,6 +97,15 @@ class Trigger(proto.Message): conditions (MutableMapping[str, google.cloud.eventarc_v1.types.StateCondition]): Output only. The reason(s) why a trigger is in FAILED state. + event_data_content_type (str): + Optional. EventDataContentType specifies the + type of payload in MIME format that is expected + from the CloudEvent data field. This is set to + `application/json` if the value is not defined. + satisfies_pzs (bool): + Output only. Whether or not this Trigger + satisfies the requirements of physical zone + separation etag (str): Output only. This checksum is computed by the server based on the value of other fields, and @@ -162,6 +166,14 @@ class Trigger(proto.Message): number=15, message='StateCondition', ) + event_data_content_type: str = proto.Field( + proto.STRING, + number=16, + ) + satisfies_pzs: bool = proto.Field( + proto.BOOL, + number=19, + ) etag: str = proto.Field( proto.STRING, number=99, @@ -176,7 +188,9 @@ class EventFilter(proto.Message): attribute (str): Required. The name of a CloudEvents attribute. Currently, only a subset of - attributes are supported for filtering. + attributes are supported for filtering. You can + [retrieve a specific provider's supported event + types](/eventarc/docs/list-providers#describe-provider). All triggers MUST provide a filter for the 'type' attribute. @@ -187,8 +201,9 @@ class EventFilter(proto.Message): events with the value of the filter. If not specified, only events that have an exact key-value pair specified in the filter are - matched. The only allowed value is - `match-path-pattern`. + matched. The allowed values are `path_pattern` + and `match-path-pattern`. `path_pattern` is only + allowed for GCFv1 triggers. """ attribute: str = proto.Field( @@ -244,10 +259,16 @@ class Destination(proto.Message): This field is a member of `oneof`_ ``descriptor``. cloud_function (str): - The Cloud Function resource name. Only Cloud - Functions V2 is supported. Format: + The Cloud Function resource name. 
Cloud + Functions V1 and V2 are supported. + Format: `projects/{project}/locations/{location}/functions/{function}` + This is a read-only field. Creating Cloud + Functions V1/V2 triggers is only supported via + the Cloud Functions product. An error will be + returned if the user sets this value. + This field is a member of `oneof`_ ``descriptor``. gke (google.cloud.eventarc_v1.types.GKE): A GKE service capable of receiving events. @@ -264,6 +285,16 @@ class Destination(proto.Message): `projects/{project}/locations/{location}/workflows/{workflow}` This field is a member of `oneof`_ ``descriptor``. + http_endpoint (google.cloud.eventarc_v1.types.HttpEndpoint): + An HTTP endpoint destination described by an + URI. + + This field is a member of `oneof`_ ``descriptor``. + network_config (google.cloud.eventarc_v1.types.NetworkConfig): + Optional. Network config is used to configure + how Eventarc resolves and connect to a + destination. This should only be used with + HttpEndpoint destination type. """ cloud_run: 'CloudRun' = proto.Field( @@ -288,6 +319,17 @@ class Destination(proto.Message): number=4, oneof='descriptor', ) + http_endpoint: 'HttpEndpoint' = proto.Field( + proto.MESSAGE, + number=5, + oneof='descriptor', + message='HttpEndpoint', + ) + network_config: gce_network_config.NetworkConfig = proto.Field( + proto.MESSAGE, + number=6, + message=gce_network_config.NetworkConfig, + ) class Transport(proto.Message): @@ -327,6 +369,7 @@ class CloudRun(proto.Message): path (str): Optional. The relative path on the Cloud Run service the events should be sent to. + The value must conform to the definition of a URI path segment (section 3.3 of RFC2396). Examples: "/route", "route", "route/subroute". @@ -373,6 +416,7 @@ class GKE(proto.Message): path (str): Optional. The relative path on the GKE service the events should be sent to. + The value must conform to the definition of a URI path segment (section 3.3 of RFC2396). Examples: "/route", "route", "route/subroute". @@ -434,4 +478,27 @@ class Pubsub(proto.Message): ) +class HttpEndpoint(proto.Message): + r"""Represents a HTTP endpoint destination. + + Attributes: + uri (str): + Required. The URI of the HTTP endpoint. + + The value must be a RFC2396 URI string. + Examples: `http://10.10.10.8:80/route`, + `http://svc.us-central1.p.local:8080/`. + Only HTTP and HTTPS protocols are supported. The + host can be either a static IP addressable from + the VPC specified by the network config, or an + internal DNS hostname of the service resolvable + via Cloud DNS. 
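To make the new trigger-side HttpEndpoint and NetworkConfig fields concrete, a trigger destination might be assembled roughly as follows. This is an illustrative sketch only: it assumes both types are exported at the eventarc_v1 package level, that the trigger-side NetworkConfig exposes a network_attachment field like the pipeline-side one shown earlier, and the resource names are placeholders.

from google.cloud import eventarc_v1

# An HTTP endpoint destination resolved and reached through a network attachment.
destination = eventarc_v1.Destination(
    http_endpoint=eventarc_v1.HttpEndpoint(
        uri="http://10.10.10.8:80/route",  # placeholder; HTTP or HTTPS is allowed here
    ),
    network_config=eventarc_v1.NetworkConfig(
        # assumed field name; placeholder network attachment
        network_attachment="projects/my-project/regions/us-central1/networkAttachments/my-attachment",
    ),
)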
+ """ + + uri: str = proto.Field( + proto.STRING, + number=1, + ) + + __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_create_channel_async.py b/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_create_channel_async.py index 48d01b7a94..87c83a50e2 100755 --- a/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_create_channel_async.py +++ b/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_create_channel_async.py @@ -47,7 +47,6 @@ async def sample_create_channel(): parent="parent_value", channel=channel, channel_id="channel_id_value", - validate_only=True, ) # Make the request diff --git a/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_create_channel_sync.py b/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_create_channel_sync.py index 15c6ce7f87..3cff394efd 100755 --- a/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_create_channel_sync.py +++ b/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_create_channel_sync.py @@ -47,7 +47,6 @@ def sample_create_channel(): parent="parent_value", channel=channel, channel_id="channel_id_value", - validate_only=True, ) # Make the request diff --git a/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_create_enrollment_async.py b/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_create_enrollment_async.py new file mode 100755 index 0000000000..3b388ab6a7 --- /dev/null +++ b/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_create_enrollment_async.py @@ -0,0 +1,63 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateEnrollment +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-eventarc + + +# [START eventarc_v1_generated_Eventarc_CreateEnrollment_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import eventarc_v1 + + +async def sample_create_enrollment(): + # Create a client + client = eventarc_v1.EventarcAsyncClient() + + # Initialize request argument(s) + enrollment = eventarc_v1.Enrollment() + enrollment.cel_match = "cel_match_value" + enrollment.message_bus = "message_bus_value" + enrollment.destination = "destination_value" + + request = eventarc_v1.CreateEnrollmentRequest( + parent="parent_value", + enrollment=enrollment, + enrollment_id="enrollment_id_value", + ) + + # Make the request + operation = client.create_enrollment(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END eventarc_v1_generated_Eventarc_CreateEnrollment_async] diff --git a/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_create_enrollment_sync.py b/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_create_enrollment_sync.py new file mode 100755 index 0000000000..2380ffa531 --- /dev/null +++ b/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_create_enrollment_sync.py @@ -0,0 +1,63 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateEnrollment +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-eventarc + + +# [START eventarc_v1_generated_Eventarc_CreateEnrollment_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import eventarc_v1 + + +def sample_create_enrollment(): + # Create a client + client = eventarc_v1.EventarcClient() + + # Initialize request argument(s) + enrollment = eventarc_v1.Enrollment() + enrollment.cel_match = "cel_match_value" + enrollment.message_bus = "message_bus_value" + enrollment.destination = "destination_value" + + request = eventarc_v1.CreateEnrollmentRequest( + parent="parent_value", + enrollment=enrollment, + enrollment_id="enrollment_id_value", + ) + + # Make the request + operation = client.create_enrollment(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END eventarc_v1_generated_Eventarc_CreateEnrollment_sync] diff --git a/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_create_google_api_source_async.py b/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_create_google_api_source_async.py new file mode 100755 index 0000000000..2a74d09d80 --- /dev/null +++ b/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_create_google_api_source_async.py @@ -0,0 +1,61 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateGoogleApiSource +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-eventarc + + +# [START eventarc_v1_generated_Eventarc_CreateGoogleApiSource_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import eventarc_v1 + + +async def sample_create_google_api_source(): + # Create a client + client = eventarc_v1.EventarcAsyncClient() + + # Initialize request argument(s) + google_api_source = eventarc_v1.GoogleApiSource() + google_api_source.destination = "destination_value" + + request = eventarc_v1.CreateGoogleApiSourceRequest( + parent="parent_value", + google_api_source=google_api_source, + google_api_source_id="google_api_source_id_value", + ) + + # Make the request + operation = client.create_google_api_source(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END eventarc_v1_generated_Eventarc_CreateGoogleApiSource_async] diff --git a/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_create_google_api_source_sync.py b/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_create_google_api_source_sync.py new file mode 100755 index 0000000000..321e972bd7 --- /dev/null +++ b/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_create_google_api_source_sync.py @@ -0,0 +1,61 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateGoogleApiSource +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-eventarc + + +# [START eventarc_v1_generated_Eventarc_CreateGoogleApiSource_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import eventarc_v1 + + +def sample_create_google_api_source(): + # Create a client + client = eventarc_v1.EventarcClient() + + # Initialize request argument(s) + google_api_source = eventarc_v1.GoogleApiSource() + google_api_source.destination = "destination_value" + + request = eventarc_v1.CreateGoogleApiSourceRequest( + parent="parent_value", + google_api_source=google_api_source, + google_api_source_id="google_api_source_id_value", + ) + + # Make the request + operation = client.create_google_api_source(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END eventarc_v1_generated_Eventarc_CreateGoogleApiSource_sync] diff --git a/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_create_message_bus_async.py b/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_create_message_bus_async.py new file mode 100755 index 0000000000..9460290dfe --- /dev/null +++ b/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_create_message_bus_async.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateMessageBus +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-eventarc + + +# [START eventarc_v1_generated_Eventarc_CreateMessageBus_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import eventarc_v1 + + +async def sample_create_message_bus(): + # Create a client + client = eventarc_v1.EventarcAsyncClient() + + # Initialize request argument(s) + request = eventarc_v1.CreateMessageBusRequest( + parent="parent_value", + message_bus_id="message_bus_id_value", + ) + + # Make the request + operation = client.create_message_bus(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END eventarc_v1_generated_Eventarc_CreateMessageBus_async] diff --git a/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_create_message_bus_sync.py b/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_create_message_bus_sync.py new file mode 100755 index 0000000000..5cf59b541b --- /dev/null +++ b/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_create_message_bus_sync.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateMessageBus +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-eventarc + + +# [START eventarc_v1_generated_Eventarc_CreateMessageBus_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import eventarc_v1 + + +def sample_create_message_bus(): + # Create a client + client = eventarc_v1.EventarcClient() + + # Initialize request argument(s) + request = eventarc_v1.CreateMessageBusRequest( + parent="parent_value", + message_bus_id="message_bus_id_value", + ) + + # Make the request + operation = client.create_message_bus(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END eventarc_v1_generated_Eventarc_CreateMessageBus_sync] diff --git a/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_create_pipeline_async.py b/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_create_pipeline_async.py new file mode 100755 index 0000000000..5a2906f7e8 --- /dev/null +++ b/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_create_pipeline_async.py @@ -0,0 +1,61 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreatePipeline +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-eventarc + + +# [START eventarc_v1_generated_Eventarc_CreatePipeline_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import eventarc_v1 + + +async def sample_create_pipeline(): + # Create a client + client = eventarc_v1.EventarcAsyncClient() + + # Initialize request argument(s) + pipeline = eventarc_v1.Pipeline() + pipeline.destinations.http_endpoint.uri = "uri_value" + + request = eventarc_v1.CreatePipelineRequest( + parent="parent_value", + pipeline=pipeline, + pipeline_id="pipeline_id_value", + ) + + # Make the request + operation = client.create_pipeline(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END eventarc_v1_generated_Eventarc_CreatePipeline_async] diff --git a/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_create_pipeline_sync.py b/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_create_pipeline_sync.py new file mode 100755 index 0000000000..24a7e63211 --- /dev/null +++ b/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_create_pipeline_sync.py @@ -0,0 +1,61 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreatePipeline +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-eventarc + + +# [START eventarc_v1_generated_Eventarc_CreatePipeline_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import eventarc_v1 + + +def sample_create_pipeline(): + # Create a client + client = eventarc_v1.EventarcClient() + + # Initialize request argument(s) + pipeline = eventarc_v1.Pipeline() + pipeline.destinations.http_endpoint.uri = "uri_value" + + request = eventarc_v1.CreatePipelineRequest( + parent="parent_value", + pipeline=pipeline, + pipeline_id="pipeline_id_value", + ) + + # Make the request + operation = client.create_pipeline(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END eventarc_v1_generated_Eventarc_CreatePipeline_sync] diff --git a/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_create_trigger_async.py b/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_create_trigger_async.py index 92756c2163..7f8aa65138 100755 --- a/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_create_trigger_async.py +++ b/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_create_trigger_async.py @@ -50,7 +50,6 @@ async def sample_create_trigger(): parent="parent_value", trigger=trigger, trigger_id="trigger_id_value", - validate_only=True, ) # Make the request diff --git a/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_create_trigger_sync.py b/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_create_trigger_sync.py index 0df4f6f6d1..226e731ab5 100755 --- a/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_create_trigger_sync.py +++ b/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_create_trigger_sync.py @@ -50,7 +50,6 @@ def sample_create_trigger(): parent="parent_value", trigger=trigger, trigger_id="trigger_id_value", - validate_only=True, ) # Make the request diff --git a/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_delete_channel_async.py b/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_delete_channel_async.py index eb88c5b18e..ced333af37 100755 --- a/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_delete_channel_async.py +++ b/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_delete_channel_async.py @@ -41,7 +41,6 @@ async def sample_delete_channel(): # Initialize request argument(s) request = eventarc_v1.DeleteChannelRequest( name="name_value", - validate_only=True, ) # Make the request diff --git a/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_delete_channel_sync.py b/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_delete_channel_sync.py index dc9b80adbc..07fc92787a 100755 --- a/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_delete_channel_sync.py +++ b/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_delete_channel_sync.py @@ -41,7 +41,6 @@ def sample_delete_channel(): # Initialize request argument(s) request = 
eventarc_v1.DeleteChannelRequest( name="name_value", - validate_only=True, ) # Make the request diff --git a/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_delete_enrollment_async.py b/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_delete_enrollment_async.py new file mode 100755 index 0000000000..1e3d923842 --- /dev/null +++ b/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_delete_enrollment_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteEnrollment +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-eventarc + + +# [START eventarc_v1_generated_Eventarc_DeleteEnrollment_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import eventarc_v1 + + +async def sample_delete_enrollment(): + # Create a client + client = eventarc_v1.EventarcAsyncClient() + + # Initialize request argument(s) + request = eventarc_v1.DeleteEnrollmentRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_enrollment(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END eventarc_v1_generated_Eventarc_DeleteEnrollment_async] diff --git a/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_delete_enrollment_sync.py b/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_delete_enrollment_sync.py new file mode 100755 index 0000000000..861755e349 --- /dev/null +++ b/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_delete_enrollment_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteEnrollment +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-eventarc + + +# [START eventarc_v1_generated_Eventarc_DeleteEnrollment_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import eventarc_v1 + + +def sample_delete_enrollment(): + # Create a client + client = eventarc_v1.EventarcClient() + + # Initialize request argument(s) + request = eventarc_v1.DeleteEnrollmentRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_enrollment(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END eventarc_v1_generated_Eventarc_DeleteEnrollment_sync] diff --git a/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_delete_google_api_source_async.py b/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_delete_google_api_source_async.py new file mode 100755 index 0000000000..4f9f875cf7 --- /dev/null +++ b/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_delete_google_api_source_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteGoogleApiSource +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-eventarc + + +# [START eventarc_v1_generated_Eventarc_DeleteGoogleApiSource_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import eventarc_v1 + + +async def sample_delete_google_api_source(): + # Create a client + client = eventarc_v1.EventarcAsyncClient() + + # Initialize request argument(s) + request = eventarc_v1.DeleteGoogleApiSourceRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_google_api_source(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END eventarc_v1_generated_Eventarc_DeleteGoogleApiSource_async] diff --git a/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_delete_google_api_source_sync.py b/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_delete_google_api_source_sync.py new file mode 100755 index 0000000000..e75bacd726 --- /dev/null +++ b/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_delete_google_api_source_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteGoogleApiSource +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-eventarc + + +# [START eventarc_v1_generated_Eventarc_DeleteGoogleApiSource_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import eventarc_v1 + + +def sample_delete_google_api_source(): + # Create a client + client = eventarc_v1.EventarcClient() + + # Initialize request argument(s) + request = eventarc_v1.DeleteGoogleApiSourceRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_google_api_source(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END eventarc_v1_generated_Eventarc_DeleteGoogleApiSource_sync] diff --git a/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_delete_message_bus_async.py b/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_delete_message_bus_async.py new file mode 100755 index 0000000000..c1cc5d492d --- /dev/null +++ b/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_delete_message_bus_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteMessageBus +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-eventarc + + +# [START eventarc_v1_generated_Eventarc_DeleteMessageBus_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import eventarc_v1 + + +async def sample_delete_message_bus(): + # Create a client + client = eventarc_v1.EventarcAsyncClient() + + # Initialize request argument(s) + request = eventarc_v1.DeleteMessageBusRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_message_bus(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END eventarc_v1_generated_Eventarc_DeleteMessageBus_async] diff --git a/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_delete_message_bus_sync.py b/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_delete_message_bus_sync.py new file mode 100755 index 0000000000..eca880cead --- /dev/null +++ b/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_delete_message_bus_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteMessageBus +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-eventarc + + +# [START eventarc_v1_generated_Eventarc_DeleteMessageBus_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import eventarc_v1 + + +def sample_delete_message_bus(): + # Create a client + client = eventarc_v1.EventarcClient() + + # Initialize request argument(s) + request = eventarc_v1.DeleteMessageBusRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_message_bus(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END eventarc_v1_generated_Eventarc_DeleteMessageBus_sync] diff --git a/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_delete_pipeline_async.py b/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_delete_pipeline_async.py new file mode 100755 index 0000000000..990b4192b9 --- /dev/null +++ b/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_delete_pipeline_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeletePipeline +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-eventarc + + +# [START eventarc_v1_generated_Eventarc_DeletePipeline_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import eventarc_v1 + + +async def sample_delete_pipeline(): + # Create a client + client = eventarc_v1.EventarcAsyncClient() + + # Initialize request argument(s) + request = eventarc_v1.DeletePipelineRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_pipeline(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END eventarc_v1_generated_Eventarc_DeletePipeline_async] diff --git a/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_delete_pipeline_sync.py b/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_delete_pipeline_sync.py new file mode 100755 index 0000000000..f22af70866 --- /dev/null +++ b/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_delete_pipeline_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeletePipeline +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-eventarc + + +# [START eventarc_v1_generated_Eventarc_DeletePipeline_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import eventarc_v1 + + +def sample_delete_pipeline(): + # Create a client + client = eventarc_v1.EventarcClient() + + # Initialize request argument(s) + request = eventarc_v1.DeletePipelineRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_pipeline(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END eventarc_v1_generated_Eventarc_DeletePipeline_sync] diff --git a/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_delete_trigger_async.py b/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_delete_trigger_async.py index 3f11794329..6dc9a3fcec 100755 --- a/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_delete_trigger_async.py +++ b/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_delete_trigger_async.py @@ -41,7 +41,6 @@ async def sample_delete_trigger(): # Initialize request argument(s) request = eventarc_v1.DeleteTriggerRequest( name="name_value", - validate_only=True, ) # Make the request diff --git a/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_delete_trigger_sync.py b/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_delete_trigger_sync.py index ced373f705..fcd451757d 100755 --- a/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_delete_trigger_sync.py +++ b/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_delete_trigger_sync.py @@ -41,7 +41,6 @@ def sample_delete_trigger(): # Initialize request argument(s) request = eventarc_v1.DeleteTriggerRequest( name="name_value", - validate_only=True, ) # Make the request diff --git a/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_enrollment_async.py b/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_enrollment_async.py new file mode 100755 index 0000000000..7fc1809c0b --- /dev/null +++ b/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_enrollment_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetEnrollment +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-eventarc + + +# [START eventarc_v1_generated_Eventarc_GetEnrollment_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import eventarc_v1 + + +async def sample_get_enrollment(): + # Create a client + client = eventarc_v1.EventarcAsyncClient() + + # Initialize request argument(s) + request = eventarc_v1.GetEnrollmentRequest( + name="name_value", + ) + + # Make the request + response = await client.get_enrollment(request=request) + + # Handle the response + print(response) + +# [END eventarc_v1_generated_Eventarc_GetEnrollment_async] diff --git a/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_enrollment_sync.py b/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_enrollment_sync.py new file mode 100755 index 0000000000..f79ec3e85e --- /dev/null +++ b/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_enrollment_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetEnrollment +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-eventarc + + +# [START eventarc_v1_generated_Eventarc_GetEnrollment_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import eventarc_v1 + + +def sample_get_enrollment(): + # Create a client + client = eventarc_v1.EventarcClient() + + # Initialize request argument(s) + request = eventarc_v1.GetEnrollmentRequest( + name="name_value", + ) + + # Make the request + response = client.get_enrollment(request=request) + + # Handle the response + print(response) + +# [END eventarc_v1_generated_Eventarc_GetEnrollment_sync] diff --git a/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_google_api_source_async.py b/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_google_api_source_async.py new file mode 100755 index 0000000000..f22d4ac7f4 --- /dev/null +++ b/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_google_api_source_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetGoogleApiSource +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-eventarc + + +# [START eventarc_v1_generated_Eventarc_GetGoogleApiSource_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import eventarc_v1 + + +async def sample_get_google_api_source(): + # Create a client + client = eventarc_v1.EventarcAsyncClient() + + # Initialize request argument(s) + request = eventarc_v1.GetGoogleApiSourceRequest( + name="name_value", + ) + + # Make the request + response = await client.get_google_api_source(request=request) + + # Handle the response + print(response) + +# [END eventarc_v1_generated_Eventarc_GetGoogleApiSource_async] diff --git a/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_google_api_source_sync.py b/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_google_api_source_sync.py new file mode 100755 index 0000000000..6b5230aae2 --- /dev/null +++ b/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_google_api_source_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetGoogleApiSource +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-eventarc + + +# [START eventarc_v1_generated_Eventarc_GetGoogleApiSource_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import eventarc_v1 + + +def sample_get_google_api_source(): + # Create a client + client = eventarc_v1.EventarcClient() + + # Initialize request argument(s) + request = eventarc_v1.GetGoogleApiSourceRequest( + name="name_value", + ) + + # Make the request + response = client.get_google_api_source(request=request) + + # Handle the response + print(response) + +# [END eventarc_v1_generated_Eventarc_GetGoogleApiSource_sync] diff --git a/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_message_bus_async.py b/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_message_bus_async.py new file mode 100755 index 0000000000..7b555b808a --- /dev/null +++ b/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_message_bus_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetMessageBus +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-eventarc + + +# [START eventarc_v1_generated_Eventarc_GetMessageBus_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import eventarc_v1 + + +async def sample_get_message_bus(): + # Create a client + client = eventarc_v1.EventarcAsyncClient() + + # Initialize request argument(s) + request = eventarc_v1.GetMessageBusRequest( + name="name_value", + ) + + # Make the request + response = await client.get_message_bus(request=request) + + # Handle the response + print(response) + +# [END eventarc_v1_generated_Eventarc_GetMessageBus_async] diff --git a/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_message_bus_sync.py b/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_message_bus_sync.py new file mode 100755 index 0000000000..15e29dbc73 --- /dev/null +++ b/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_message_bus_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetMessageBus +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-eventarc + + +# [START eventarc_v1_generated_Eventarc_GetMessageBus_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import eventarc_v1 + + +def sample_get_message_bus(): + # Create a client + client = eventarc_v1.EventarcClient() + + # Initialize request argument(s) + request = eventarc_v1.GetMessageBusRequest( + name="name_value", + ) + + # Make the request + response = client.get_message_bus(request=request) + + # Handle the response + print(response) + +# [END eventarc_v1_generated_Eventarc_GetMessageBus_sync] diff --git a/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_pipeline_async.py b/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_pipeline_async.py new file mode 100755 index 0000000000..e31d86be0e --- /dev/null +++ b/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_pipeline_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetPipeline +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-eventarc + + +# [START eventarc_v1_generated_Eventarc_GetPipeline_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import eventarc_v1 + + +async def sample_get_pipeline(): + # Create a client + client = eventarc_v1.EventarcAsyncClient() + + # Initialize request argument(s) + request = eventarc_v1.GetPipelineRequest( + name="name_value", + ) + + # Make the request + response = await client.get_pipeline(request=request) + + # Handle the response + print(response) + +# [END eventarc_v1_generated_Eventarc_GetPipeline_async] diff --git a/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_pipeline_sync.py b/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_pipeline_sync.py new file mode 100755 index 0000000000..18c0501d29 --- /dev/null +++ b/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_pipeline_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetPipeline +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-eventarc + + +# [START eventarc_v1_generated_Eventarc_GetPipeline_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import eventarc_v1 + + +def sample_get_pipeline(): + # Create a client + client = eventarc_v1.EventarcClient() + + # Initialize request argument(s) + request = eventarc_v1.GetPipelineRequest( + name="name_value", + ) + + # Make the request + response = client.get_pipeline(request=request) + + # Handle the response + print(response) + +# [END eventarc_v1_generated_Eventarc_GetPipeline_sync] diff --git a/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_list_enrollments_async.py b/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_list_enrollments_async.py new file mode 100755 index 0000000000..c029b484ed --- /dev/null +++ b/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_list_enrollments_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListEnrollments +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-eventarc + + +# [START eventarc_v1_generated_Eventarc_ListEnrollments_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import eventarc_v1 + + +async def sample_list_enrollments(): + # Create a client + client = eventarc_v1.EventarcAsyncClient() + + # Initialize request argument(s) + request = eventarc_v1.ListEnrollmentsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_enrollments(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END eventarc_v1_generated_Eventarc_ListEnrollments_async] diff --git a/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_list_enrollments_sync.py b/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_list_enrollments_sync.py new file mode 100755 index 0000000000..7ad3db45e0 --- /dev/null +++ b/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_list_enrollments_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListEnrollments +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-eventarc + + +# [START eventarc_v1_generated_Eventarc_ListEnrollments_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import eventarc_v1 + + +def sample_list_enrollments(): + # Create a client + client = eventarc_v1.EventarcClient() + + # Initialize request argument(s) + request = eventarc_v1.ListEnrollmentsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_enrollments(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END eventarc_v1_generated_Eventarc_ListEnrollments_sync] diff --git a/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_list_google_api_sources_async.py b/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_list_google_api_sources_async.py new file mode 100755 index 0000000000..84c8b920a8 --- /dev/null +++ b/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_list_google_api_sources_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListGoogleApiSources +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-eventarc + + +# [START eventarc_v1_generated_Eventarc_ListGoogleApiSources_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import eventarc_v1 + + +async def sample_list_google_api_sources(): + # Create a client + client = eventarc_v1.EventarcAsyncClient() + + # Initialize request argument(s) + request = eventarc_v1.ListGoogleApiSourcesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_google_api_sources(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END eventarc_v1_generated_Eventarc_ListGoogleApiSources_async] diff --git a/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_list_google_api_sources_sync.py b/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_list_google_api_sources_sync.py new file mode 100755 index 0000000000..90b1f13d8e --- /dev/null +++ b/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_list_google_api_sources_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListGoogleApiSources +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-eventarc + + +# [START eventarc_v1_generated_Eventarc_ListGoogleApiSources_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import eventarc_v1 + + +def sample_list_google_api_sources(): + # Create a client + client = eventarc_v1.EventarcClient() + + # Initialize request argument(s) + request = eventarc_v1.ListGoogleApiSourcesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_google_api_sources(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END eventarc_v1_generated_Eventarc_ListGoogleApiSources_sync] diff --git a/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_list_message_bus_enrollments_async.py b/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_list_message_bus_enrollments_async.py new file mode 100755 index 0000000000..87990245b4 --- /dev/null +++ b/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_list_message_bus_enrollments_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListMessageBusEnrollments +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-eventarc + + +# [START eventarc_v1_generated_Eventarc_ListMessageBusEnrollments_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import eventarc_v1 + + +async def sample_list_message_bus_enrollments(): + # Create a client + client = eventarc_v1.EventarcAsyncClient() + + # Initialize request argument(s) + request = eventarc_v1.ListMessageBusEnrollmentsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_message_bus_enrollments(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END eventarc_v1_generated_Eventarc_ListMessageBusEnrollments_async] diff --git a/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_list_message_bus_enrollments_sync.py b/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_list_message_bus_enrollments_sync.py new file mode 100755 index 0000000000..db263e5fbe --- /dev/null +++ b/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_list_message_bus_enrollments_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListMessageBusEnrollments +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-eventarc + + +# [START eventarc_v1_generated_Eventarc_ListMessageBusEnrollments_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import eventarc_v1 + + +def sample_list_message_bus_enrollments(): + # Create a client + client = eventarc_v1.EventarcClient() + + # Initialize request argument(s) + request = eventarc_v1.ListMessageBusEnrollmentsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_message_bus_enrollments(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END eventarc_v1_generated_Eventarc_ListMessageBusEnrollments_sync] diff --git a/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_list_message_buses_async.py b/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_list_message_buses_async.py new file mode 100755 index 0000000000..ba99326f8f --- /dev/null +++ b/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_list_message_buses_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListMessageBuses +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-eventarc + + +# [START eventarc_v1_generated_Eventarc_ListMessageBuses_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import eventarc_v1 + + +async def sample_list_message_buses(): + # Create a client + client = eventarc_v1.EventarcAsyncClient() + + # Initialize request argument(s) + request = eventarc_v1.ListMessageBusesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_message_buses(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END eventarc_v1_generated_Eventarc_ListMessageBuses_async] diff --git a/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_list_message_buses_sync.py b/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_list_message_buses_sync.py new file mode 100755 index 0000000000..9f8836911a --- /dev/null +++ b/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_list_message_buses_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListMessageBuses +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-eventarc + + +# [START eventarc_v1_generated_Eventarc_ListMessageBuses_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import eventarc_v1 + + +def sample_list_message_buses(): + # Create a client + client = eventarc_v1.EventarcClient() + + # Initialize request argument(s) + request = eventarc_v1.ListMessageBusesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_message_buses(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END eventarc_v1_generated_Eventarc_ListMessageBuses_sync] diff --git a/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_list_pipelines_async.py b/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_list_pipelines_async.py new file mode 100755 index 0000000000..345f2e9cfd --- /dev/null +++ b/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_list_pipelines_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListPipelines +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-eventarc + + +# [START eventarc_v1_generated_Eventarc_ListPipelines_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import eventarc_v1 + + +async def sample_list_pipelines(): + # Create a client + client = eventarc_v1.EventarcAsyncClient() + + # Initialize request argument(s) + request = eventarc_v1.ListPipelinesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_pipelines(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END eventarc_v1_generated_Eventarc_ListPipelines_async] diff --git a/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_list_pipelines_sync.py b/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_list_pipelines_sync.py new file mode 100755 index 0000000000..8c5102d512 --- /dev/null +++ b/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_list_pipelines_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListPipelines +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-eventarc + + +# [START eventarc_v1_generated_Eventarc_ListPipelines_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import eventarc_v1 + + +def sample_list_pipelines(): + # Create a client + client = eventarc_v1.EventarcClient() + + # Initialize request argument(s) + request = eventarc_v1.ListPipelinesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_pipelines(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END eventarc_v1_generated_Eventarc_ListPipelines_sync] diff --git a/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_update_channel_async.py b/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_update_channel_async.py index a2ce3afb32..ebbee5e22a 100755 --- a/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_update_channel_async.py +++ b/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_update_channel_async.py @@ -40,7 +40,6 @@ async def sample_update_channel(): # Initialize request argument(s) request = eventarc_v1.UpdateChannelRequest( - validate_only=True, ) # Make the request diff --git a/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_update_channel_sync.py b/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_update_channel_sync.py index aed4767385..792b9eea6a 100755 --- a/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_update_channel_sync.py +++ b/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_update_channel_sync.py @@ -40,7 +40,6 @@ def sample_update_channel(): # Initialize request argument(s) request = eventarc_v1.UpdateChannelRequest( - validate_only=True, ) # Make the request diff --git a/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_update_enrollment_async.py b/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_update_enrollment_async.py new file mode 100755 index 0000000000..1fcf073cf5 --- /dev/null +++ b/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_update_enrollment_async.py @@ -0,0 +1,61 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateEnrollment +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-eventarc + + +# [START eventarc_v1_generated_Eventarc_UpdateEnrollment_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import eventarc_v1 + + +async def sample_update_enrollment(): + # Create a client + client = eventarc_v1.EventarcAsyncClient() + + # Initialize request argument(s) + enrollment = eventarc_v1.Enrollment() + enrollment.cel_match = "cel_match_value" + enrollment.message_bus = "message_bus_value" + enrollment.destination = "destination_value" + + request = eventarc_v1.UpdateEnrollmentRequest( + enrollment=enrollment, + ) + + # Make the request + operation = client.update_enrollment(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END eventarc_v1_generated_Eventarc_UpdateEnrollment_async] diff --git a/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_update_enrollment_sync.py b/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_update_enrollment_sync.py new file mode 100755 index 0000000000..17e3560bd0 --- /dev/null +++ b/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_update_enrollment_sync.py @@ -0,0 +1,61 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateEnrollment +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-eventarc + + +# [START eventarc_v1_generated_Eventarc_UpdateEnrollment_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import eventarc_v1 + + +def sample_update_enrollment(): + # Create a client + client = eventarc_v1.EventarcClient() + + # Initialize request argument(s) + enrollment = eventarc_v1.Enrollment() + enrollment.cel_match = "cel_match_value" + enrollment.message_bus = "message_bus_value" + enrollment.destination = "destination_value" + + request = eventarc_v1.UpdateEnrollmentRequest( + enrollment=enrollment, + ) + + # Make the request + operation = client.update_enrollment(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END eventarc_v1_generated_Eventarc_UpdateEnrollment_sync] diff --git a/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_update_google_api_source_async.py b/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_update_google_api_source_async.py new file mode 100755 index 0000000000..657b881875 --- /dev/null +++ b/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_update_google_api_source_async.py @@ -0,0 +1,59 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateGoogleApiSource +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-eventarc + + +# [START eventarc_v1_generated_Eventarc_UpdateGoogleApiSource_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import eventarc_v1 + + +async def sample_update_google_api_source(): + # Create a client + client = eventarc_v1.EventarcAsyncClient() + + # Initialize request argument(s) + google_api_source = eventarc_v1.GoogleApiSource() + google_api_source.destination = "destination_value" + + request = eventarc_v1.UpdateGoogleApiSourceRequest( + google_api_source=google_api_source, + ) + + # Make the request + operation = client.update_google_api_source(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END eventarc_v1_generated_Eventarc_UpdateGoogleApiSource_async] diff --git a/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_update_google_api_source_sync.py b/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_update_google_api_source_sync.py new file mode 100755 index 0000000000..807940e3b3 --- /dev/null +++ b/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_update_google_api_source_sync.py @@ -0,0 +1,59 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateGoogleApiSource +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-eventarc + + +# [START eventarc_v1_generated_Eventarc_UpdateGoogleApiSource_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import eventarc_v1 + + +def sample_update_google_api_source(): + # Create a client + client = eventarc_v1.EventarcClient() + + # Initialize request argument(s) + google_api_source = eventarc_v1.GoogleApiSource() + google_api_source.destination = "destination_value" + + request = eventarc_v1.UpdateGoogleApiSourceRequest( + google_api_source=google_api_source, + ) + + # Make the request + operation = client.update_google_api_source(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END eventarc_v1_generated_Eventarc_UpdateGoogleApiSource_sync] diff --git a/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_update_message_bus_async.py b/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_update_message_bus_async.py new file mode 100755 index 0000000000..558031b649 --- /dev/null +++ b/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_update_message_bus_async.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateMessageBus +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-eventarc + + +# [START eventarc_v1_generated_Eventarc_UpdateMessageBus_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import eventarc_v1 + + +async def sample_update_message_bus(): + # Create a client + client = eventarc_v1.EventarcAsyncClient() + + # Initialize request argument(s) + request = eventarc_v1.UpdateMessageBusRequest( + ) + + # Make the request + operation = client.update_message_bus(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END eventarc_v1_generated_Eventarc_UpdateMessageBus_async] diff --git a/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_update_message_bus_sync.py b/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_update_message_bus_sync.py new file mode 100755 index 0000000000..9d8ec4e50e --- /dev/null +++ b/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_update_message_bus_sync.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateMessageBus +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-eventarc + + +# [START eventarc_v1_generated_Eventarc_UpdateMessageBus_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import eventarc_v1 + + +def sample_update_message_bus(): + # Create a client + client = eventarc_v1.EventarcClient() + + # Initialize request argument(s) + request = eventarc_v1.UpdateMessageBusRequest( + ) + + # Make the request + operation = client.update_message_bus(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END eventarc_v1_generated_Eventarc_UpdateMessageBus_sync] diff --git a/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_update_pipeline_async.py b/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_update_pipeline_async.py new file mode 100755 index 0000000000..5177282e03 --- /dev/null +++ b/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_update_pipeline_async.py @@ -0,0 +1,59 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdatePipeline +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-eventarc + + +# [START eventarc_v1_generated_Eventarc_UpdatePipeline_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import eventarc_v1 + + +async def sample_update_pipeline(): + # Create a client + client = eventarc_v1.EventarcAsyncClient() + + # Initialize request argument(s) + pipeline = eventarc_v1.Pipeline() + pipeline.destinations.http_endpoint.uri = "uri_value" + + request = eventarc_v1.UpdatePipelineRequest( + pipeline=pipeline, + ) + + # Make the request + operation = client.update_pipeline(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END eventarc_v1_generated_Eventarc_UpdatePipeline_async] diff --git a/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_update_pipeline_sync.py b/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_update_pipeline_sync.py new file mode 100755 index 0000000000..2419e2a177 --- /dev/null +++ b/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_update_pipeline_sync.py @@ -0,0 +1,59 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdatePipeline +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-eventarc + + +# [START eventarc_v1_generated_Eventarc_UpdatePipeline_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import eventarc_v1 + + +def sample_update_pipeline(): + # Create a client + client = eventarc_v1.EventarcClient() + + # Initialize request argument(s) + pipeline = eventarc_v1.Pipeline() + pipeline.destinations.http_endpoint.uri = "uri_value" + + request = eventarc_v1.UpdatePipelineRequest( + pipeline=pipeline, + ) + + # Make the request + operation = client.update_pipeline(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END eventarc_v1_generated_Eventarc_UpdatePipeline_sync] diff --git a/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_update_trigger_async.py b/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_update_trigger_async.py index 0eba9dd1a6..d960547e69 100755 --- a/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_update_trigger_async.py +++ b/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_update_trigger_async.py @@ -40,7 +40,6 @@ async def sample_update_trigger(): # Initialize request argument(s) request = eventarc_v1.UpdateTriggerRequest( - validate_only=True, ) # Make the request diff --git a/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_update_trigger_sync.py b/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_update_trigger_sync.py index 4954ec9087..98a5a89aa6 100755 --- a/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_update_trigger_sync.py +++ b/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_update_trigger_sync.py @@ -40,7 +40,6 @@ def sample_update_trigger(): # Initialize request argument(s) request = eventarc_v1.UpdateTriggerRequest( - validate_only=True, ) # Make the request diff --git a/tests/integration/goldens/eventarc/samples/generated_samples/snippet_metadata_google.cloud.eventarc.v1.json b/tests/integration/goldens/eventarc/samples/generated_samples/snippet_metadata_google.cloud.eventarc.v1.json index d6c04697d5..24b80878b6 100755 --- a/tests/integration/goldens/eventarc/samples/generated_samples/snippet_metadata_google.cloud.eventarc.v1.json +++ b/tests/integration/goldens/eventarc/samples/generated_samples/snippet_metadata_google.cloud.eventarc.v1.json @@ -245,12 +245,12 @@ "regionTag": "eventarc_v1_generated_Eventarc_CreateChannel_async", "segments": [ { - "end": 62, + "end": 61, "start": 27, "type": "FULL" }, { - "end": 62, + "end": 61, "start": 27, "type": "SHORT" }, @@ -260,18 +260,18 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 52, + "end": 51, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 59, - "start": 53, + "end": 58, + "start": 52, "type": "REQUEST_EXECUTION" }, { - "end": 63, - "start": 60, + "end": 62, + "start": 59, "type": "RESPONSE_HANDLING" } ], @@ -333,12 +333,12 @@ "regionTag": "eventarc_v1_generated_Eventarc_CreateChannel_sync", "segments": [ { - "end": 62, + "end": 61, "start": 27, "type": "FULL" }, { - "end": 62, + "end": 61, "start": 27, "type": "SHORT" }, @@ -348,18 +348,18 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 52, + "end": 51, "start": 41, "type": "REQUEST_INITIALIZATION" }, 
{ - "end": 59, - "start": 53, + "end": 58, + "start": 52, "type": "REQUEST_EXECUTION" }, { - "end": 63, - "start": 60, + "end": 62, + "start": 59, "type": "RESPONSE_HANDLING" } ], @@ -373,30 +373,30 @@ "fullName": "google.cloud.eventarc_v1.EventarcAsyncClient", "shortName": "EventarcAsyncClient" }, - "fullName": "google.cloud.eventarc_v1.EventarcAsyncClient.create_trigger", + "fullName": "google.cloud.eventarc_v1.EventarcAsyncClient.create_enrollment", "method": { - "fullName": "google.cloud.eventarc.v1.Eventarc.CreateTrigger", + "fullName": "google.cloud.eventarc.v1.Eventarc.CreateEnrollment", "service": { "fullName": "google.cloud.eventarc.v1.Eventarc", "shortName": "Eventarc" }, - "shortName": "CreateTrigger" + "shortName": "CreateEnrollment" }, "parameters": [ { "name": "request", - "type": "google.cloud.eventarc_v1.types.CreateTriggerRequest" + "type": "google.cloud.eventarc_v1.types.CreateEnrollmentRequest" }, { "name": "parent", "type": "str" }, { - "name": "trigger", - "type": "google.cloud.eventarc_v1.types.Trigger" + "name": "enrollment", + "type": "google.cloud.eventarc_v1.types.Enrollment" }, { - "name": "trigger_id", + "name": "enrollment_id", "type": "str" }, { @@ -413,21 +413,21 @@ } ], "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "create_trigger" + "shortName": "create_enrollment" }, - "description": "Sample for CreateTrigger", - "file": "eventarc_v1_generated_eventarc_create_trigger_async.py", + "description": "Sample for CreateEnrollment", + "file": "eventarc_v1_generated_eventarc_create_enrollment_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "eventarc_v1_generated_Eventarc_CreateTrigger_async", + "regionTag": "eventarc_v1_generated_Eventarc_CreateEnrollment_async", "segments": [ { - "end": 65, + "end": 62, "start": 27, "type": "FULL" }, { - "end": 65, + "end": 62, "start": 27, "type": "SHORT" }, @@ -437,22 +437,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 55, + "end": 52, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 62, - "start": 56, + "end": 59, + "start": 53, "type": "REQUEST_EXECUTION" }, { - "end": 66, - "start": 63, + "end": 63, + "start": 60, "type": "RESPONSE_HANDLING" } ], - "title": "eventarc_v1_generated_eventarc_create_trigger_async.py" + "title": "eventarc_v1_generated_eventarc_create_enrollment_async.py" }, { "canonical": true, @@ -461,30 +461,30 @@ "fullName": "google.cloud.eventarc_v1.EventarcClient", "shortName": "EventarcClient" }, - "fullName": "google.cloud.eventarc_v1.EventarcClient.create_trigger", + "fullName": "google.cloud.eventarc_v1.EventarcClient.create_enrollment", "method": { - "fullName": "google.cloud.eventarc.v1.Eventarc.CreateTrigger", + "fullName": "google.cloud.eventarc.v1.Eventarc.CreateEnrollment", "service": { "fullName": "google.cloud.eventarc.v1.Eventarc", "shortName": "Eventarc" }, - "shortName": "CreateTrigger" + "shortName": "CreateEnrollment" }, "parameters": [ { "name": "request", - "type": "google.cloud.eventarc_v1.types.CreateTriggerRequest" + "type": "google.cloud.eventarc_v1.types.CreateEnrollmentRequest" }, { "name": "parent", "type": "str" }, { - "name": "trigger", - "type": "google.cloud.eventarc_v1.types.Trigger" + "name": "enrollment", + "type": "google.cloud.eventarc_v1.types.Enrollment" }, { - "name": "trigger_id", + "name": "enrollment_id", "type": "str" }, { @@ -501,21 +501,21 @@ } ], "resultType": "google.api_core.operation.Operation", - "shortName": "create_trigger" + "shortName": "create_enrollment" }, - "description": 
"Sample for CreateTrigger", - "file": "eventarc_v1_generated_eventarc_create_trigger_sync.py", + "description": "Sample for CreateEnrollment", + "file": "eventarc_v1_generated_eventarc_create_enrollment_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "eventarc_v1_generated_Eventarc_CreateTrigger_sync", + "regionTag": "eventarc_v1_generated_Eventarc_CreateEnrollment_sync", "segments": [ { - "end": 65, + "end": 62, "start": 27, "type": "FULL" }, { - "end": 65, + "end": 62, "start": 27, "type": "SHORT" }, @@ -525,22 +525,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 55, + "end": 52, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 62, - "start": 56, + "end": 59, + "start": 53, "type": "REQUEST_EXECUTION" }, { - "end": 66, - "start": 63, + "end": 63, + "start": 60, "type": "RESPONSE_HANDLING" } ], - "title": "eventarc_v1_generated_eventarc_create_trigger_sync.py" + "title": "eventarc_v1_generated_eventarc_create_enrollment_sync.py" }, { "canonical": true, @@ -550,22 +550,30 @@ "fullName": "google.cloud.eventarc_v1.EventarcAsyncClient", "shortName": "EventarcAsyncClient" }, - "fullName": "google.cloud.eventarc_v1.EventarcAsyncClient.delete_channel_connection", + "fullName": "google.cloud.eventarc_v1.EventarcAsyncClient.create_google_api_source", "method": { - "fullName": "google.cloud.eventarc.v1.Eventarc.DeleteChannelConnection", + "fullName": "google.cloud.eventarc.v1.Eventarc.CreateGoogleApiSource", "service": { "fullName": "google.cloud.eventarc.v1.Eventarc", "shortName": "Eventarc" }, - "shortName": "DeleteChannelConnection" + "shortName": "CreateGoogleApiSource" }, "parameters": [ { "name": "request", - "type": "google.cloud.eventarc_v1.types.DeleteChannelConnectionRequest" + "type": "google.cloud.eventarc_v1.types.CreateGoogleApiSourceRequest" }, { - "name": "name", + "name": "parent", + "type": "str" + }, + { + "name": "google_api_source", + "type": "google.cloud.eventarc_v1.types.GoogleApiSource" + }, + { + "name": "google_api_source_id", "type": "str" }, { @@ -582,21 +590,21 @@ } ], "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "delete_channel_connection" + "shortName": "create_google_api_source" }, - "description": "Sample for DeleteChannelConnection", - "file": "eventarc_v1_generated_eventarc_delete_channel_connection_async.py", + "description": "Sample for CreateGoogleApiSource", + "file": "eventarc_v1_generated_eventarc_create_google_api_source_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "eventarc_v1_generated_Eventarc_DeleteChannelConnection_async", + "regionTag": "eventarc_v1_generated_Eventarc_CreateGoogleApiSource_async", "segments": [ { - "end": 55, + "end": 60, "start": 27, "type": "FULL" }, { - "end": 55, + "end": 60, "start": 27, "type": "SHORT" }, @@ -606,22 +614,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 50, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 52, - "start": 46, + "end": 57, + "start": 51, "type": "REQUEST_EXECUTION" }, { - "end": 56, - "start": 53, + "end": 61, + "start": 58, "type": "RESPONSE_HANDLING" } ], - "title": "eventarc_v1_generated_eventarc_delete_channel_connection_async.py" + "title": "eventarc_v1_generated_eventarc_create_google_api_source_async.py" }, { "canonical": true, @@ -630,22 +638,30 @@ "fullName": "google.cloud.eventarc_v1.EventarcClient", "shortName": "EventarcClient" }, - "fullName": "google.cloud.eventarc_v1.EventarcClient.delete_channel_connection", + "fullName": 
"google.cloud.eventarc_v1.EventarcClient.create_google_api_source", "method": { - "fullName": "google.cloud.eventarc.v1.Eventarc.DeleteChannelConnection", + "fullName": "google.cloud.eventarc.v1.Eventarc.CreateGoogleApiSource", "service": { "fullName": "google.cloud.eventarc.v1.Eventarc", "shortName": "Eventarc" }, - "shortName": "DeleteChannelConnection" + "shortName": "CreateGoogleApiSource" }, "parameters": [ { "name": "request", - "type": "google.cloud.eventarc_v1.types.DeleteChannelConnectionRequest" + "type": "google.cloud.eventarc_v1.types.CreateGoogleApiSourceRequest" }, { - "name": "name", + "name": "parent", + "type": "str" + }, + { + "name": "google_api_source", + "type": "google.cloud.eventarc_v1.types.GoogleApiSource" + }, + { + "name": "google_api_source_id", "type": "str" }, { @@ -662,21 +678,21 @@ } ], "resultType": "google.api_core.operation.Operation", - "shortName": "delete_channel_connection" + "shortName": "create_google_api_source" }, - "description": "Sample for DeleteChannelConnection", - "file": "eventarc_v1_generated_eventarc_delete_channel_connection_sync.py", + "description": "Sample for CreateGoogleApiSource", + "file": "eventarc_v1_generated_eventarc_create_google_api_source_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "eventarc_v1_generated_Eventarc_DeleteChannelConnection_sync", + "regionTag": "eventarc_v1_generated_Eventarc_CreateGoogleApiSource_sync", "segments": [ { - "end": 55, + "end": 60, "start": 27, "type": "FULL" }, { - "end": 55, + "end": 60, "start": 27, "type": "SHORT" }, @@ -686,22 +702,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 50, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 52, - "start": 46, + "end": 57, + "start": 51, "type": "REQUEST_EXECUTION" }, { - "end": 56, - "start": 53, + "end": 61, + "start": 58, "type": "RESPONSE_HANDLING" } ], - "title": "eventarc_v1_generated_eventarc_delete_channel_connection_sync.py" + "title": "eventarc_v1_generated_eventarc_create_google_api_source_sync.py" }, { "canonical": true, @@ -711,22 +727,30 @@ "fullName": "google.cloud.eventarc_v1.EventarcAsyncClient", "shortName": "EventarcAsyncClient" }, - "fullName": "google.cloud.eventarc_v1.EventarcAsyncClient.delete_channel", + "fullName": "google.cloud.eventarc_v1.EventarcAsyncClient.create_message_bus", "method": { - "fullName": "google.cloud.eventarc.v1.Eventarc.DeleteChannel", + "fullName": "google.cloud.eventarc.v1.Eventarc.CreateMessageBus", "service": { "fullName": "google.cloud.eventarc.v1.Eventarc", "shortName": "Eventarc" }, - "shortName": "DeleteChannel" + "shortName": "CreateMessageBus" }, "parameters": [ { "name": "request", - "type": "google.cloud.eventarc_v1.types.DeleteChannelRequest" + "type": "google.cloud.eventarc_v1.types.CreateMessageBusRequest" }, { - "name": "name", + "name": "parent", + "type": "str" + }, + { + "name": "message_bus", + "type": "google.cloud.eventarc_v1.types.MessageBus" + }, + { + "name": "message_bus_id", "type": "str" }, { @@ -743,13 +767,13 @@ } ], "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "delete_channel" + "shortName": "create_message_bus" }, - "description": "Sample for DeleteChannel", - "file": "eventarc_v1_generated_eventarc_delete_channel_async.py", + "description": "Sample for CreateMessageBus", + "file": "eventarc_v1_generated_eventarc_create_message_bus_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "eventarc_v1_generated_Eventarc_DeleteChannel_async", + "regionTag": 
"eventarc_v1_generated_Eventarc_CreateMessageBus_async", "segments": [ { "end": 56, @@ -782,7 +806,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "eventarc_v1_generated_eventarc_delete_channel_async.py" + "title": "eventarc_v1_generated_eventarc_create_message_bus_async.py" }, { "canonical": true, @@ -791,22 +815,30 @@ "fullName": "google.cloud.eventarc_v1.EventarcClient", "shortName": "EventarcClient" }, - "fullName": "google.cloud.eventarc_v1.EventarcClient.delete_channel", + "fullName": "google.cloud.eventarc_v1.EventarcClient.create_message_bus", "method": { - "fullName": "google.cloud.eventarc.v1.Eventarc.DeleteChannel", + "fullName": "google.cloud.eventarc.v1.Eventarc.CreateMessageBus", "service": { "fullName": "google.cloud.eventarc.v1.Eventarc", "shortName": "Eventarc" }, - "shortName": "DeleteChannel" + "shortName": "CreateMessageBus" }, "parameters": [ { "name": "request", - "type": "google.cloud.eventarc_v1.types.DeleteChannelRequest" + "type": "google.cloud.eventarc_v1.types.CreateMessageBusRequest" }, { - "name": "name", + "name": "parent", + "type": "str" + }, + { + "name": "message_bus", + "type": "google.cloud.eventarc_v1.types.MessageBus" + }, + { + "name": "message_bus_id", "type": "str" }, { @@ -823,13 +855,13 @@ } ], "resultType": "google.api_core.operation.Operation", - "shortName": "delete_channel" + "shortName": "create_message_bus" }, - "description": "Sample for DeleteChannel", - "file": "eventarc_v1_generated_eventarc_delete_channel_sync.py", + "description": "Sample for CreateMessageBus", + "file": "eventarc_v1_generated_eventarc_create_message_bus_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "eventarc_v1_generated_Eventarc_DeleteChannel_sync", + "regionTag": "eventarc_v1_generated_Eventarc_CreateMessageBus_sync", "segments": [ { "end": 56, @@ -862,7 +894,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "eventarc_v1_generated_eventarc_delete_channel_sync.py" + "title": "eventarc_v1_generated_eventarc_create_message_bus_sync.py" }, { "canonical": true, @@ -872,27 +904,31 @@ "fullName": "google.cloud.eventarc_v1.EventarcAsyncClient", "shortName": "EventarcAsyncClient" }, - "fullName": "google.cloud.eventarc_v1.EventarcAsyncClient.delete_trigger", + "fullName": "google.cloud.eventarc_v1.EventarcAsyncClient.create_pipeline", "method": { - "fullName": "google.cloud.eventarc.v1.Eventarc.DeleteTrigger", + "fullName": "google.cloud.eventarc.v1.Eventarc.CreatePipeline", "service": { "fullName": "google.cloud.eventarc.v1.Eventarc", "shortName": "Eventarc" }, - "shortName": "DeleteTrigger" + "shortName": "CreatePipeline" }, "parameters": [ { "name": "request", - "type": "google.cloud.eventarc_v1.types.DeleteTriggerRequest" + "type": "google.cloud.eventarc_v1.types.CreatePipelineRequest" }, { - "name": "name", + "name": "parent", "type": "str" }, { - "name": "allow_missing", - "type": "bool" + "name": "pipeline", + "type": "google.cloud.eventarc_v1.types.Pipeline" + }, + { + "name": "pipeline_id", + "type": "str" }, { "name": "retry", @@ -908,21 +944,21 @@ } ], "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "delete_trigger" + "shortName": "create_pipeline" }, - "description": "Sample for DeleteTrigger", - "file": "eventarc_v1_generated_eventarc_delete_trigger_async.py", + "description": "Sample for CreatePipeline", + "file": "eventarc_v1_generated_eventarc_create_pipeline_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "eventarc_v1_generated_Eventarc_DeleteTrigger_async", + 
"regionTag": "eventarc_v1_generated_Eventarc_CreatePipeline_async", "segments": [ { - "end": 56, + "end": 60, "start": 27, "type": "FULL" }, { - "end": 56, + "end": 60, "start": 27, "type": "SHORT" }, @@ -932,22 +968,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 46, + "end": 50, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 53, - "start": 47, + "end": 57, + "start": 51, "type": "REQUEST_EXECUTION" }, { - "end": 57, - "start": 54, + "end": 61, + "start": 58, "type": "RESPONSE_HANDLING" } ], - "title": "eventarc_v1_generated_eventarc_delete_trigger_async.py" + "title": "eventarc_v1_generated_eventarc_create_pipeline_async.py" }, { "canonical": true, @@ -956,27 +992,31 @@ "fullName": "google.cloud.eventarc_v1.EventarcClient", "shortName": "EventarcClient" }, - "fullName": "google.cloud.eventarc_v1.EventarcClient.delete_trigger", + "fullName": "google.cloud.eventarc_v1.EventarcClient.create_pipeline", "method": { - "fullName": "google.cloud.eventarc.v1.Eventarc.DeleteTrigger", + "fullName": "google.cloud.eventarc.v1.Eventarc.CreatePipeline", "service": { "fullName": "google.cloud.eventarc.v1.Eventarc", "shortName": "Eventarc" }, - "shortName": "DeleteTrigger" + "shortName": "CreatePipeline" }, "parameters": [ { "name": "request", - "type": "google.cloud.eventarc_v1.types.DeleteTriggerRequest" + "type": "google.cloud.eventarc_v1.types.CreatePipelineRequest" }, { - "name": "name", + "name": "parent", "type": "str" }, { - "name": "allow_missing", - "type": "bool" + "name": "pipeline", + "type": "google.cloud.eventarc_v1.types.Pipeline" + }, + { + "name": "pipeline_id", + "type": "str" }, { "name": "retry", @@ -992,21 +1032,21 @@ } ], "resultType": "google.api_core.operation.Operation", - "shortName": "delete_trigger" + "shortName": "create_pipeline" }, - "description": "Sample for DeleteTrigger", - "file": "eventarc_v1_generated_eventarc_delete_trigger_sync.py", + "description": "Sample for CreatePipeline", + "file": "eventarc_v1_generated_eventarc_create_pipeline_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "eventarc_v1_generated_Eventarc_DeleteTrigger_sync", + "regionTag": "eventarc_v1_generated_Eventarc_CreatePipeline_sync", "segments": [ { - "end": 56, + "end": 60, "start": 27, "type": "FULL" }, { - "end": 56, + "end": 60, "start": 27, "type": "SHORT" }, @@ -1016,22 +1056,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 46, + "end": 50, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 53, - "start": 47, + "end": 57, + "start": 51, "type": "REQUEST_EXECUTION" }, { - "end": 57, - "start": 54, + "end": 61, + "start": 58, "type": "RESPONSE_HANDLING" } ], - "title": "eventarc_v1_generated_eventarc_delete_trigger_sync.py" + "title": "eventarc_v1_generated_eventarc_create_pipeline_sync.py" }, { "canonical": true, @@ -1041,22 +1081,30 @@ "fullName": "google.cloud.eventarc_v1.EventarcAsyncClient", "shortName": "EventarcAsyncClient" }, - "fullName": "google.cloud.eventarc_v1.EventarcAsyncClient.get_channel_connection", + "fullName": "google.cloud.eventarc_v1.EventarcAsyncClient.create_trigger", "method": { - "fullName": "google.cloud.eventarc.v1.Eventarc.GetChannelConnection", + "fullName": "google.cloud.eventarc.v1.Eventarc.CreateTrigger", "service": { "fullName": "google.cloud.eventarc.v1.Eventarc", "shortName": "Eventarc" }, - "shortName": "GetChannelConnection" + "shortName": "CreateTrigger" }, "parameters": [ { "name": "request", - "type": "google.cloud.eventarc_v1.types.GetChannelConnectionRequest" + "type": 
"google.cloud.eventarc_v1.types.CreateTriggerRequest" }, { - "name": "name", + "name": "parent", + "type": "str" + }, + { + "name": "trigger", + "type": "google.cloud.eventarc_v1.types.Trigger" + }, + { + "name": "trigger_id", "type": "str" }, { @@ -1072,22 +1120,22 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.eventarc_v1.types.ChannelConnection", - "shortName": "get_channel_connection" + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_trigger" }, - "description": "Sample for GetChannelConnection", - "file": "eventarc_v1_generated_eventarc_get_channel_connection_async.py", + "description": "Sample for CreateTrigger", + "file": "eventarc_v1_generated_eventarc_create_trigger_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "eventarc_v1_generated_Eventarc_GetChannelConnection_async", + "regionTag": "eventarc_v1_generated_Eventarc_CreateTrigger_async", "segments": [ { - "end": 51, + "end": 64, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 64, "start": 27, "type": "SHORT" }, @@ -1097,22 +1145,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 54, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 61, + "start": 55, "type": "REQUEST_EXECUTION" }, { - "end": 52, - "start": 49, + "end": 65, + "start": 62, "type": "RESPONSE_HANDLING" } ], - "title": "eventarc_v1_generated_eventarc_get_channel_connection_async.py" + "title": "eventarc_v1_generated_eventarc_create_trigger_async.py" }, { "canonical": true, @@ -1121,22 +1169,30 @@ "fullName": "google.cloud.eventarc_v1.EventarcClient", "shortName": "EventarcClient" }, - "fullName": "google.cloud.eventarc_v1.EventarcClient.get_channel_connection", + "fullName": "google.cloud.eventarc_v1.EventarcClient.create_trigger", "method": { - "fullName": "google.cloud.eventarc.v1.Eventarc.GetChannelConnection", + "fullName": "google.cloud.eventarc.v1.Eventarc.CreateTrigger", "service": { "fullName": "google.cloud.eventarc.v1.Eventarc", "shortName": "Eventarc" }, - "shortName": "GetChannelConnection" + "shortName": "CreateTrigger" }, "parameters": [ { "name": "request", - "type": "google.cloud.eventarc_v1.types.GetChannelConnectionRequest" + "type": "google.cloud.eventarc_v1.types.CreateTriggerRequest" }, { - "name": "name", + "name": "parent", + "type": "str" + }, + { + "name": "trigger", + "type": "google.cloud.eventarc_v1.types.Trigger" + }, + { + "name": "trigger_id", "type": "str" }, { @@ -1152,22 +1208,22 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.eventarc_v1.types.ChannelConnection", - "shortName": "get_channel_connection" + "resultType": "google.api_core.operation.Operation", + "shortName": "create_trigger" }, - "description": "Sample for GetChannelConnection", - "file": "eventarc_v1_generated_eventarc_get_channel_connection_sync.py", + "description": "Sample for CreateTrigger", + "file": "eventarc_v1_generated_eventarc_create_trigger_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "eventarc_v1_generated_Eventarc_GetChannelConnection_sync", + "regionTag": "eventarc_v1_generated_Eventarc_CreateTrigger_sync", "segments": [ { - "end": 51, + "end": 64, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 64, "start": 27, "type": "SHORT" }, @@ -1177,22 +1233,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 54, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 61, + 
"start": 55, "type": "REQUEST_EXECUTION" }, { - "end": 52, - "start": 49, + "end": 65, + "start": 62, "type": "RESPONSE_HANDLING" } ], - "title": "eventarc_v1_generated_eventarc_get_channel_connection_sync.py" + "title": "eventarc_v1_generated_eventarc_create_trigger_sync.py" }, { "canonical": true, @@ -1202,19 +1258,19 @@ "fullName": "google.cloud.eventarc_v1.EventarcAsyncClient", "shortName": "EventarcAsyncClient" }, - "fullName": "google.cloud.eventarc_v1.EventarcAsyncClient.get_channel", + "fullName": "google.cloud.eventarc_v1.EventarcAsyncClient.delete_channel_connection", "method": { - "fullName": "google.cloud.eventarc.v1.Eventarc.GetChannel", + "fullName": "google.cloud.eventarc.v1.Eventarc.DeleteChannelConnection", "service": { "fullName": "google.cloud.eventarc.v1.Eventarc", "shortName": "Eventarc" }, - "shortName": "GetChannel" + "shortName": "DeleteChannelConnection" }, "parameters": [ { "name": "request", - "type": "google.cloud.eventarc_v1.types.GetChannelRequest" + "type": "google.cloud.eventarc_v1.types.DeleteChannelConnectionRequest" }, { "name": "name", @@ -1233,22 +1289,22 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.eventarc_v1.types.Channel", - "shortName": "get_channel" + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_channel_connection" }, - "description": "Sample for GetChannel", - "file": "eventarc_v1_generated_eventarc_get_channel_async.py", + "description": "Sample for DeleteChannelConnection", + "file": "eventarc_v1_generated_eventarc_delete_channel_connection_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "eventarc_v1_generated_Eventarc_GetChannel_async", + "regionTag": "eventarc_v1_generated_Eventarc_DeleteChannelConnection_async", "segments": [ { - "end": 51, + "end": 55, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 55, "start": 27, "type": "SHORT" }, @@ -1263,17 +1319,17 @@ "type": "REQUEST_INITIALIZATION" }, { - "end": 48, + "end": 52, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 52, - "start": 49, + "end": 56, + "start": 53, "type": "RESPONSE_HANDLING" } ], - "title": "eventarc_v1_generated_eventarc_get_channel_async.py" + "title": "eventarc_v1_generated_eventarc_delete_channel_connection_async.py" }, { "canonical": true, @@ -1282,19 +1338,19 @@ "fullName": "google.cloud.eventarc_v1.EventarcClient", "shortName": "EventarcClient" }, - "fullName": "google.cloud.eventarc_v1.EventarcClient.get_channel", + "fullName": "google.cloud.eventarc_v1.EventarcClient.delete_channel_connection", "method": { - "fullName": "google.cloud.eventarc.v1.Eventarc.GetChannel", + "fullName": "google.cloud.eventarc.v1.Eventarc.DeleteChannelConnection", "service": { "fullName": "google.cloud.eventarc.v1.Eventarc", "shortName": "Eventarc" }, - "shortName": "GetChannel" + "shortName": "DeleteChannelConnection" }, "parameters": [ { "name": "request", - "type": "google.cloud.eventarc_v1.types.GetChannelRequest" + "type": "google.cloud.eventarc_v1.types.DeleteChannelConnectionRequest" }, { "name": "name", @@ -1313,22 +1369,22 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.eventarc_v1.types.Channel", - "shortName": "get_channel" + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_channel_connection" }, - "description": "Sample for GetChannel", - "file": "eventarc_v1_generated_eventarc_get_channel_sync.py", + "description": "Sample for DeleteChannelConnection", + "file": 
"eventarc_v1_generated_eventarc_delete_channel_connection_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "eventarc_v1_generated_Eventarc_GetChannel_sync", + "regionTag": "eventarc_v1_generated_Eventarc_DeleteChannelConnection_sync", "segments": [ { - "end": 51, + "end": 55, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 55, "start": 27, "type": "SHORT" }, @@ -1343,17 +1399,17 @@ "type": "REQUEST_INITIALIZATION" }, { - "end": 48, + "end": 52, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 52, - "start": 49, + "end": 56, + "start": 53, "type": "RESPONSE_HANDLING" } ], - "title": "eventarc_v1_generated_eventarc_get_channel_sync.py" + "title": "eventarc_v1_generated_eventarc_delete_channel_connection_sync.py" }, { "canonical": true, @@ -1363,19 +1419,19 @@ "fullName": "google.cloud.eventarc_v1.EventarcAsyncClient", "shortName": "EventarcAsyncClient" }, - "fullName": "google.cloud.eventarc_v1.EventarcAsyncClient.get_google_channel_config", + "fullName": "google.cloud.eventarc_v1.EventarcAsyncClient.delete_channel", "method": { - "fullName": "google.cloud.eventarc.v1.Eventarc.GetGoogleChannelConfig", + "fullName": "google.cloud.eventarc.v1.Eventarc.DeleteChannel", "service": { "fullName": "google.cloud.eventarc.v1.Eventarc", "shortName": "Eventarc" }, - "shortName": "GetGoogleChannelConfig" + "shortName": "DeleteChannel" }, "parameters": [ { "name": "request", - "type": "google.cloud.eventarc_v1.types.GetGoogleChannelConfigRequest" + "type": "google.cloud.eventarc_v1.types.DeleteChannelRequest" }, { "name": "name", @@ -1394,22 +1450,22 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.eventarc_v1.types.GoogleChannelConfig", - "shortName": "get_google_channel_config" + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_channel" }, - "description": "Sample for GetGoogleChannelConfig", - "file": "eventarc_v1_generated_eventarc_get_google_channel_config_async.py", + "description": "Sample for DeleteChannel", + "file": "eventarc_v1_generated_eventarc_delete_channel_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "eventarc_v1_generated_Eventarc_GetGoogleChannelConfig_async", + "regionTag": "eventarc_v1_generated_Eventarc_DeleteChannel_async", "segments": [ { - "end": 51, + "end": 55, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 55, "start": 27, "type": "SHORT" }, @@ -1424,17 +1480,17 @@ "type": "REQUEST_INITIALIZATION" }, { - "end": 48, + "end": 52, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 52, - "start": 49, + "end": 56, + "start": 53, "type": "RESPONSE_HANDLING" } ], - "title": "eventarc_v1_generated_eventarc_get_google_channel_config_async.py" + "title": "eventarc_v1_generated_eventarc_delete_channel_async.py" }, { "canonical": true, @@ -1443,19 +1499,19 @@ "fullName": "google.cloud.eventarc_v1.EventarcClient", "shortName": "EventarcClient" }, - "fullName": "google.cloud.eventarc_v1.EventarcClient.get_google_channel_config", + "fullName": "google.cloud.eventarc_v1.EventarcClient.delete_channel", "method": { - "fullName": "google.cloud.eventarc.v1.Eventarc.GetGoogleChannelConfig", + "fullName": "google.cloud.eventarc.v1.Eventarc.DeleteChannel", "service": { "fullName": "google.cloud.eventarc.v1.Eventarc", "shortName": "Eventarc" }, - "shortName": "GetGoogleChannelConfig" + "shortName": "DeleteChannel" }, "parameters": [ { "name": "request", - "type": "google.cloud.eventarc_v1.types.GetGoogleChannelConfigRequest" + "type": 
"google.cloud.eventarc_v1.types.DeleteChannelRequest" }, { "name": "name", @@ -1474,22 +1530,22 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.eventarc_v1.types.GoogleChannelConfig", - "shortName": "get_google_channel_config" + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_channel" }, - "description": "Sample for GetGoogleChannelConfig", - "file": "eventarc_v1_generated_eventarc_get_google_channel_config_sync.py", + "description": "Sample for DeleteChannel", + "file": "eventarc_v1_generated_eventarc_delete_channel_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "eventarc_v1_generated_Eventarc_GetGoogleChannelConfig_sync", + "regionTag": "eventarc_v1_generated_Eventarc_DeleteChannel_sync", "segments": [ { - "end": 51, + "end": 55, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 55, "start": 27, "type": "SHORT" }, @@ -1504,17 +1560,17 @@ "type": "REQUEST_INITIALIZATION" }, { - "end": 48, + "end": 52, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 52, - "start": 49, + "end": 56, + "start": 53, "type": "RESPONSE_HANDLING" } ], - "title": "eventarc_v1_generated_eventarc_get_google_channel_config_sync.py" + "title": "eventarc_v1_generated_eventarc_delete_channel_sync.py" }, { "canonical": true, @@ -1524,24 +1580,28 @@ "fullName": "google.cloud.eventarc_v1.EventarcAsyncClient", "shortName": "EventarcAsyncClient" }, - "fullName": "google.cloud.eventarc_v1.EventarcAsyncClient.get_provider", + "fullName": "google.cloud.eventarc_v1.EventarcAsyncClient.delete_enrollment", "method": { - "fullName": "google.cloud.eventarc.v1.Eventarc.GetProvider", + "fullName": "google.cloud.eventarc.v1.Eventarc.DeleteEnrollment", "service": { "fullName": "google.cloud.eventarc.v1.Eventarc", "shortName": "Eventarc" }, - "shortName": "GetProvider" + "shortName": "DeleteEnrollment" }, "parameters": [ { "name": "request", - "type": "google.cloud.eventarc_v1.types.GetProviderRequest" + "type": "google.cloud.eventarc_v1.types.DeleteEnrollmentRequest" }, { "name": "name", "type": "str" }, + { + "name": "etag", + "type": "str" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -1555,22 +1615,22 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.eventarc_v1.types.Provider", - "shortName": "get_provider" + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_enrollment" }, - "description": "Sample for GetProvider", - "file": "eventarc_v1_generated_eventarc_get_provider_async.py", + "description": "Sample for DeleteEnrollment", + "file": "eventarc_v1_generated_eventarc_delete_enrollment_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "eventarc_v1_generated_Eventarc_GetProvider_async", + "regionTag": "eventarc_v1_generated_Eventarc_DeleteEnrollment_async", "segments": [ { - "end": 51, + "end": 55, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 55, "start": 27, "type": "SHORT" }, @@ -1585,17 +1645,17 @@ "type": "REQUEST_INITIALIZATION" }, { - "end": 48, + "end": 52, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 52, - "start": 49, + "end": 56, + "start": 53, "type": "RESPONSE_HANDLING" } ], - "title": "eventarc_v1_generated_eventarc_get_provider_async.py" + "title": "eventarc_v1_generated_eventarc_delete_enrollment_async.py" }, { "canonical": true, @@ -1604,24 +1664,28 @@ "fullName": "google.cloud.eventarc_v1.EventarcClient", "shortName": "EventarcClient" }, - "fullName": 
"google.cloud.eventarc_v1.EventarcClient.get_provider", + "fullName": "google.cloud.eventarc_v1.EventarcClient.delete_enrollment", "method": { - "fullName": "google.cloud.eventarc.v1.Eventarc.GetProvider", + "fullName": "google.cloud.eventarc.v1.Eventarc.DeleteEnrollment", "service": { "fullName": "google.cloud.eventarc.v1.Eventarc", "shortName": "Eventarc" }, - "shortName": "GetProvider" + "shortName": "DeleteEnrollment" }, "parameters": [ { "name": "request", - "type": "google.cloud.eventarc_v1.types.GetProviderRequest" + "type": "google.cloud.eventarc_v1.types.DeleteEnrollmentRequest" }, { "name": "name", "type": "str" }, + { + "name": "etag", + "type": "str" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -1635,22 +1699,22 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.eventarc_v1.types.Provider", - "shortName": "get_provider" + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_enrollment" }, - "description": "Sample for GetProvider", - "file": "eventarc_v1_generated_eventarc_get_provider_sync.py", + "description": "Sample for DeleteEnrollment", + "file": "eventarc_v1_generated_eventarc_delete_enrollment_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "eventarc_v1_generated_Eventarc_GetProvider_sync", + "regionTag": "eventarc_v1_generated_Eventarc_DeleteEnrollment_sync", "segments": [ { - "end": 51, + "end": 55, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 55, "start": 27, "type": "SHORT" }, @@ -1665,17 +1729,17 @@ "type": "REQUEST_INITIALIZATION" }, { - "end": 48, + "end": 52, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 52, - "start": 49, + "end": 56, + "start": 53, "type": "RESPONSE_HANDLING" } ], - "title": "eventarc_v1_generated_eventarc_get_provider_sync.py" + "title": "eventarc_v1_generated_eventarc_delete_enrollment_sync.py" }, { "canonical": true, @@ -1685,24 +1749,28 @@ "fullName": "google.cloud.eventarc_v1.EventarcAsyncClient", "shortName": "EventarcAsyncClient" }, - "fullName": "google.cloud.eventarc_v1.EventarcAsyncClient.get_trigger", + "fullName": "google.cloud.eventarc_v1.EventarcAsyncClient.delete_google_api_source", "method": { - "fullName": "google.cloud.eventarc.v1.Eventarc.GetTrigger", + "fullName": "google.cloud.eventarc.v1.Eventarc.DeleteGoogleApiSource", "service": { "fullName": "google.cloud.eventarc.v1.Eventarc", "shortName": "Eventarc" }, - "shortName": "GetTrigger" + "shortName": "DeleteGoogleApiSource" }, "parameters": [ { "name": "request", - "type": "google.cloud.eventarc_v1.types.GetTriggerRequest" + "type": "google.cloud.eventarc_v1.types.DeleteGoogleApiSourceRequest" }, { "name": "name", "type": "str" }, + { + "name": "etag", + "type": "str" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -1716,22 +1784,22 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.eventarc_v1.types.Trigger", - "shortName": "get_trigger" + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_google_api_source" }, - "description": "Sample for GetTrigger", - "file": "eventarc_v1_generated_eventarc_get_trigger_async.py", + "description": "Sample for DeleteGoogleApiSource", + "file": "eventarc_v1_generated_eventarc_delete_google_api_source_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "eventarc_v1_generated_Eventarc_GetTrigger_async", + "regionTag": "eventarc_v1_generated_Eventarc_DeleteGoogleApiSource_async", "segments": [ { - "end": 
51, + "end": 55, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 55, "start": 27, "type": "SHORT" }, @@ -1746,17 +1814,17 @@ "type": "REQUEST_INITIALIZATION" }, { - "end": 48, + "end": 52, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 52, - "start": 49, + "end": 56, + "start": 53, "type": "RESPONSE_HANDLING" } ], - "title": "eventarc_v1_generated_eventarc_get_trigger_async.py" + "title": "eventarc_v1_generated_eventarc_delete_google_api_source_async.py" }, { "canonical": true, @@ -1765,24 +1833,28 @@ "fullName": "google.cloud.eventarc_v1.EventarcClient", "shortName": "EventarcClient" }, - "fullName": "google.cloud.eventarc_v1.EventarcClient.get_trigger", + "fullName": "google.cloud.eventarc_v1.EventarcClient.delete_google_api_source", "method": { - "fullName": "google.cloud.eventarc.v1.Eventarc.GetTrigger", + "fullName": "google.cloud.eventarc.v1.Eventarc.DeleteGoogleApiSource", "service": { "fullName": "google.cloud.eventarc.v1.Eventarc", "shortName": "Eventarc" }, - "shortName": "GetTrigger" + "shortName": "DeleteGoogleApiSource" }, "parameters": [ { "name": "request", - "type": "google.cloud.eventarc_v1.types.GetTriggerRequest" + "type": "google.cloud.eventarc_v1.types.DeleteGoogleApiSourceRequest" }, { "name": "name", "type": "str" }, + { + "name": "etag", + "type": "str" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -1796,22 +1868,22 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.eventarc_v1.types.Trigger", - "shortName": "get_trigger" + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_google_api_source" }, - "description": "Sample for GetTrigger", - "file": "eventarc_v1_generated_eventarc_get_trigger_sync.py", + "description": "Sample for DeleteGoogleApiSource", + "file": "eventarc_v1_generated_eventarc_delete_google_api_source_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "eventarc_v1_generated_Eventarc_GetTrigger_sync", + "regionTag": "eventarc_v1_generated_Eventarc_DeleteGoogleApiSource_sync", "segments": [ { - "end": 51, + "end": 55, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 55, "start": 27, "type": "SHORT" }, @@ -1826,17 +1898,17 @@ "type": "REQUEST_INITIALIZATION" }, { - "end": 48, + "end": 52, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 52, - "start": 49, + "end": 56, + "start": 53, "type": "RESPONSE_HANDLING" } ], - "title": "eventarc_v1_generated_eventarc_get_trigger_sync.py" + "title": "eventarc_v1_generated_eventarc_delete_google_api_source_sync.py" }, { "canonical": true, @@ -1846,27 +1918,3271 @@ "fullName": "google.cloud.eventarc_v1.EventarcAsyncClient", "shortName": "EventarcAsyncClient" }, - "fullName": "google.cloud.eventarc_v1.EventarcAsyncClient.list_channel_connections", + "fullName": "google.cloud.eventarc_v1.EventarcAsyncClient.delete_message_bus", "method": { - "fullName": "google.cloud.eventarc.v1.Eventarc.ListChannelConnections", + "fullName": "google.cloud.eventarc.v1.Eventarc.DeleteMessageBus", "service": { "fullName": "google.cloud.eventarc.v1.Eventarc", "shortName": "Eventarc" }, - "shortName": "ListChannelConnections" + "shortName": "DeleteMessageBus" }, "parameters": [ { "name": "request", - "type": "google.cloud.eventarc_v1.types.ListChannelConnectionsRequest" + "type": "google.cloud.eventarc_v1.types.DeleteMessageBusRequest" }, { - "name": "parent", + "name": "name", "type": "str" }, { - "name": "retry", - "type": "google.api_core.retry.Retry" + "name": "etag", + "type": "str" + }, + { + "name": 
"retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_message_bus" + }, + "description": "Sample for DeleteMessageBus", + "file": "eventarc_v1_generated_eventarc_delete_message_bus_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "eventarc_v1_generated_Eventarc_DeleteMessageBus_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "eventarc_v1_generated_eventarc_delete_message_bus_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.eventarc_v1.EventarcClient", + "shortName": "EventarcClient" + }, + "fullName": "google.cloud.eventarc_v1.EventarcClient.delete_message_bus", + "method": { + "fullName": "google.cloud.eventarc.v1.Eventarc.DeleteMessageBus", + "service": { + "fullName": "google.cloud.eventarc.v1.Eventarc", + "shortName": "Eventarc" + }, + "shortName": "DeleteMessageBus" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.eventarc_v1.types.DeleteMessageBusRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "etag", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_message_bus" + }, + "description": "Sample for DeleteMessageBus", + "file": "eventarc_v1_generated_eventarc_delete_message_bus_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "eventarc_v1_generated_Eventarc_DeleteMessageBus_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "eventarc_v1_generated_eventarc_delete_message_bus_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.eventarc_v1.EventarcAsyncClient", + "shortName": "EventarcAsyncClient" + }, + "fullName": "google.cloud.eventarc_v1.EventarcAsyncClient.delete_pipeline", + "method": { + "fullName": "google.cloud.eventarc.v1.Eventarc.DeletePipeline", + "service": { + "fullName": "google.cloud.eventarc.v1.Eventarc", + "shortName": "Eventarc" + }, + "shortName": "DeletePipeline" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.eventarc_v1.types.DeletePipelineRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "etag", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, 
bytes]]]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_pipeline" + }, + "description": "Sample for DeletePipeline", + "file": "eventarc_v1_generated_eventarc_delete_pipeline_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "eventarc_v1_generated_Eventarc_DeletePipeline_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "eventarc_v1_generated_eventarc_delete_pipeline_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.eventarc_v1.EventarcClient", + "shortName": "EventarcClient" + }, + "fullName": "google.cloud.eventarc_v1.EventarcClient.delete_pipeline", + "method": { + "fullName": "google.cloud.eventarc.v1.Eventarc.DeletePipeline", + "service": { + "fullName": "google.cloud.eventarc.v1.Eventarc", + "shortName": "Eventarc" + }, + "shortName": "DeletePipeline" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.eventarc_v1.types.DeletePipelineRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "etag", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_pipeline" + }, + "description": "Sample for DeletePipeline", + "file": "eventarc_v1_generated_eventarc_delete_pipeline_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "eventarc_v1_generated_Eventarc_DeletePipeline_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "eventarc_v1_generated_eventarc_delete_pipeline_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.eventarc_v1.EventarcAsyncClient", + "shortName": "EventarcAsyncClient" + }, + "fullName": "google.cloud.eventarc_v1.EventarcAsyncClient.delete_trigger", + "method": { + "fullName": "google.cloud.eventarc.v1.Eventarc.DeleteTrigger", + "service": { + "fullName": "google.cloud.eventarc.v1.Eventarc", + "shortName": "Eventarc" + }, + "shortName": "DeleteTrigger" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.eventarc_v1.types.DeleteTriggerRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "allow_missing", + "type": "bool" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_trigger" + }, + "description": "Sample for DeleteTrigger", + "file": 
"eventarc_v1_generated_eventarc_delete_trigger_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "eventarc_v1_generated_Eventarc_DeleteTrigger_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "eventarc_v1_generated_eventarc_delete_trigger_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.eventarc_v1.EventarcClient", + "shortName": "EventarcClient" + }, + "fullName": "google.cloud.eventarc_v1.EventarcClient.delete_trigger", + "method": { + "fullName": "google.cloud.eventarc.v1.Eventarc.DeleteTrigger", + "service": { + "fullName": "google.cloud.eventarc.v1.Eventarc", + "shortName": "Eventarc" + }, + "shortName": "DeleteTrigger" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.eventarc_v1.types.DeleteTriggerRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "allow_missing", + "type": "bool" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_trigger" + }, + "description": "Sample for DeleteTrigger", + "file": "eventarc_v1_generated_eventarc_delete_trigger_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "eventarc_v1_generated_Eventarc_DeleteTrigger_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "eventarc_v1_generated_eventarc_delete_trigger_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.eventarc_v1.EventarcAsyncClient", + "shortName": "EventarcAsyncClient" + }, + "fullName": "google.cloud.eventarc_v1.EventarcAsyncClient.get_channel_connection", + "method": { + "fullName": "google.cloud.eventarc.v1.Eventarc.GetChannelConnection", + "service": { + "fullName": "google.cloud.eventarc.v1.Eventarc", + "shortName": "Eventarc" + }, + "shortName": "GetChannelConnection" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.eventarc_v1.types.GetChannelConnectionRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.eventarc_v1.types.ChannelConnection", + "shortName": "get_channel_connection" + }, + "description": "Sample for GetChannelConnection", + "file": "eventarc_v1_generated_eventarc_get_channel_connection_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "eventarc_v1_generated_Eventarc_GetChannelConnection_async", + "segments": [ 
+ { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "eventarc_v1_generated_eventarc_get_channel_connection_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.eventarc_v1.EventarcClient", + "shortName": "EventarcClient" + }, + "fullName": "google.cloud.eventarc_v1.EventarcClient.get_channel_connection", + "method": { + "fullName": "google.cloud.eventarc.v1.Eventarc.GetChannelConnection", + "service": { + "fullName": "google.cloud.eventarc.v1.Eventarc", + "shortName": "Eventarc" + }, + "shortName": "GetChannelConnection" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.eventarc_v1.types.GetChannelConnectionRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.eventarc_v1.types.ChannelConnection", + "shortName": "get_channel_connection" + }, + "description": "Sample for GetChannelConnection", + "file": "eventarc_v1_generated_eventarc_get_channel_connection_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "eventarc_v1_generated_Eventarc_GetChannelConnection_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "eventarc_v1_generated_eventarc_get_channel_connection_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.eventarc_v1.EventarcAsyncClient", + "shortName": "EventarcAsyncClient" + }, + "fullName": "google.cloud.eventarc_v1.EventarcAsyncClient.get_channel", + "method": { + "fullName": "google.cloud.eventarc.v1.Eventarc.GetChannel", + "service": { + "fullName": "google.cloud.eventarc.v1.Eventarc", + "shortName": "Eventarc" + }, + "shortName": "GetChannel" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.eventarc_v1.types.GetChannelRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.eventarc_v1.types.Channel", + "shortName": "get_channel" + }, + "description": "Sample for GetChannel", + "file": "eventarc_v1_generated_eventarc_get_channel_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "eventarc_v1_generated_Eventarc_GetChannel_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + 
{ + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "eventarc_v1_generated_eventarc_get_channel_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.eventarc_v1.EventarcClient", + "shortName": "EventarcClient" + }, + "fullName": "google.cloud.eventarc_v1.EventarcClient.get_channel", + "method": { + "fullName": "google.cloud.eventarc.v1.Eventarc.GetChannel", + "service": { + "fullName": "google.cloud.eventarc.v1.Eventarc", + "shortName": "Eventarc" + }, + "shortName": "GetChannel" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.eventarc_v1.types.GetChannelRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.eventarc_v1.types.Channel", + "shortName": "get_channel" + }, + "description": "Sample for GetChannel", + "file": "eventarc_v1_generated_eventarc_get_channel_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "eventarc_v1_generated_Eventarc_GetChannel_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "eventarc_v1_generated_eventarc_get_channel_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.eventarc_v1.EventarcAsyncClient", + "shortName": "EventarcAsyncClient" + }, + "fullName": "google.cloud.eventarc_v1.EventarcAsyncClient.get_enrollment", + "method": { + "fullName": "google.cloud.eventarc.v1.Eventarc.GetEnrollment", + "service": { + "fullName": "google.cloud.eventarc.v1.Eventarc", + "shortName": "Eventarc" + }, + "shortName": "GetEnrollment" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.eventarc_v1.types.GetEnrollmentRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.eventarc_v1.types.Enrollment", + "shortName": "get_enrollment" + }, + "description": "Sample for GetEnrollment", + "file": "eventarc_v1_generated_eventarc_get_enrollment_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "eventarc_v1_generated_Eventarc_GetEnrollment_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "eventarc_v1_generated_eventarc_get_enrollment_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.eventarc_v1.EventarcClient", + "shortName": 
"EventarcClient" + }, + "fullName": "google.cloud.eventarc_v1.EventarcClient.get_enrollment", + "method": { + "fullName": "google.cloud.eventarc.v1.Eventarc.GetEnrollment", + "service": { + "fullName": "google.cloud.eventarc.v1.Eventarc", + "shortName": "Eventarc" + }, + "shortName": "GetEnrollment" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.eventarc_v1.types.GetEnrollmentRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.eventarc_v1.types.Enrollment", + "shortName": "get_enrollment" + }, + "description": "Sample for GetEnrollment", + "file": "eventarc_v1_generated_eventarc_get_enrollment_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "eventarc_v1_generated_Eventarc_GetEnrollment_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "eventarc_v1_generated_eventarc_get_enrollment_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.eventarc_v1.EventarcAsyncClient", + "shortName": "EventarcAsyncClient" + }, + "fullName": "google.cloud.eventarc_v1.EventarcAsyncClient.get_google_api_source", + "method": { + "fullName": "google.cloud.eventarc.v1.Eventarc.GetGoogleApiSource", + "service": { + "fullName": "google.cloud.eventarc.v1.Eventarc", + "shortName": "Eventarc" + }, + "shortName": "GetGoogleApiSource" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.eventarc_v1.types.GetGoogleApiSourceRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.eventarc_v1.types.GoogleApiSource", + "shortName": "get_google_api_source" + }, + "description": "Sample for GetGoogleApiSource", + "file": "eventarc_v1_generated_eventarc_get_google_api_source_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "eventarc_v1_generated_Eventarc_GetGoogleApiSource_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "eventarc_v1_generated_eventarc_get_google_api_source_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.eventarc_v1.EventarcClient", + "shortName": "EventarcClient" + }, + "fullName": "google.cloud.eventarc_v1.EventarcClient.get_google_api_source", + "method": { + "fullName": "google.cloud.eventarc.v1.Eventarc.GetGoogleApiSource", + "service": { + "fullName": "google.cloud.eventarc.v1.Eventarc", 
+ "shortName": "Eventarc" + }, + "shortName": "GetGoogleApiSource" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.eventarc_v1.types.GetGoogleApiSourceRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.eventarc_v1.types.GoogleApiSource", + "shortName": "get_google_api_source" + }, + "description": "Sample for GetGoogleApiSource", + "file": "eventarc_v1_generated_eventarc_get_google_api_source_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "eventarc_v1_generated_Eventarc_GetGoogleApiSource_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "eventarc_v1_generated_eventarc_get_google_api_source_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.eventarc_v1.EventarcAsyncClient", + "shortName": "EventarcAsyncClient" + }, + "fullName": "google.cloud.eventarc_v1.EventarcAsyncClient.get_google_channel_config", + "method": { + "fullName": "google.cloud.eventarc.v1.Eventarc.GetGoogleChannelConfig", + "service": { + "fullName": "google.cloud.eventarc.v1.Eventarc", + "shortName": "Eventarc" + }, + "shortName": "GetGoogleChannelConfig" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.eventarc_v1.types.GetGoogleChannelConfigRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.eventarc_v1.types.GoogleChannelConfig", + "shortName": "get_google_channel_config" + }, + "description": "Sample for GetGoogleChannelConfig", + "file": "eventarc_v1_generated_eventarc_get_google_channel_config_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "eventarc_v1_generated_Eventarc_GetGoogleChannelConfig_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "eventarc_v1_generated_eventarc_get_google_channel_config_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.eventarc_v1.EventarcClient", + "shortName": "EventarcClient" + }, + "fullName": "google.cloud.eventarc_v1.EventarcClient.get_google_channel_config", + "method": { + "fullName": "google.cloud.eventarc.v1.Eventarc.GetGoogleChannelConfig", + "service": { + "fullName": "google.cloud.eventarc.v1.Eventarc", + "shortName": "Eventarc" + }, + "shortName": "GetGoogleChannelConfig" + }, + "parameters": [ + { + "name": "request", + "type": 
"google.cloud.eventarc_v1.types.GetGoogleChannelConfigRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.eventarc_v1.types.GoogleChannelConfig", + "shortName": "get_google_channel_config" + }, + "description": "Sample for GetGoogleChannelConfig", + "file": "eventarc_v1_generated_eventarc_get_google_channel_config_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "eventarc_v1_generated_Eventarc_GetGoogleChannelConfig_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "eventarc_v1_generated_eventarc_get_google_channel_config_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.eventarc_v1.EventarcAsyncClient", + "shortName": "EventarcAsyncClient" + }, + "fullName": "google.cloud.eventarc_v1.EventarcAsyncClient.get_message_bus", + "method": { + "fullName": "google.cloud.eventarc.v1.Eventarc.GetMessageBus", + "service": { + "fullName": "google.cloud.eventarc.v1.Eventarc", + "shortName": "Eventarc" + }, + "shortName": "GetMessageBus" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.eventarc_v1.types.GetMessageBusRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.eventarc_v1.types.MessageBus", + "shortName": "get_message_bus" + }, + "description": "Sample for GetMessageBus", + "file": "eventarc_v1_generated_eventarc_get_message_bus_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "eventarc_v1_generated_Eventarc_GetMessageBus_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "eventarc_v1_generated_eventarc_get_message_bus_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.eventarc_v1.EventarcClient", + "shortName": "EventarcClient" + }, + "fullName": "google.cloud.eventarc_v1.EventarcClient.get_message_bus", + "method": { + "fullName": "google.cloud.eventarc.v1.Eventarc.GetMessageBus", + "service": { + "fullName": "google.cloud.eventarc.v1.Eventarc", + "shortName": "Eventarc" + }, + "shortName": "GetMessageBus" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.eventarc_v1.types.GetMessageBusRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": 
"metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.eventarc_v1.types.MessageBus", + "shortName": "get_message_bus" + }, + "description": "Sample for GetMessageBus", + "file": "eventarc_v1_generated_eventarc_get_message_bus_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "eventarc_v1_generated_Eventarc_GetMessageBus_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "eventarc_v1_generated_eventarc_get_message_bus_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.eventarc_v1.EventarcAsyncClient", + "shortName": "EventarcAsyncClient" + }, + "fullName": "google.cloud.eventarc_v1.EventarcAsyncClient.get_pipeline", + "method": { + "fullName": "google.cloud.eventarc.v1.Eventarc.GetPipeline", + "service": { + "fullName": "google.cloud.eventarc.v1.Eventarc", + "shortName": "Eventarc" + }, + "shortName": "GetPipeline" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.eventarc_v1.types.GetPipelineRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.eventarc_v1.types.Pipeline", + "shortName": "get_pipeline" + }, + "description": "Sample for GetPipeline", + "file": "eventarc_v1_generated_eventarc_get_pipeline_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "eventarc_v1_generated_Eventarc_GetPipeline_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "eventarc_v1_generated_eventarc_get_pipeline_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.eventarc_v1.EventarcClient", + "shortName": "EventarcClient" + }, + "fullName": "google.cloud.eventarc_v1.EventarcClient.get_pipeline", + "method": { + "fullName": "google.cloud.eventarc.v1.Eventarc.GetPipeline", + "service": { + "fullName": "google.cloud.eventarc.v1.Eventarc", + "shortName": "Eventarc" + }, + "shortName": "GetPipeline" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.eventarc_v1.types.GetPipelineRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.eventarc_v1.types.Pipeline", + "shortName": "get_pipeline" + }, + "description": "Sample for GetPipeline", + "file": "eventarc_v1_generated_eventarc_get_pipeline_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + 
"regionTag": "eventarc_v1_generated_Eventarc_GetPipeline_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "eventarc_v1_generated_eventarc_get_pipeline_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.eventarc_v1.EventarcAsyncClient", + "shortName": "EventarcAsyncClient" + }, + "fullName": "google.cloud.eventarc_v1.EventarcAsyncClient.get_provider", + "method": { + "fullName": "google.cloud.eventarc.v1.Eventarc.GetProvider", + "service": { + "fullName": "google.cloud.eventarc.v1.Eventarc", + "shortName": "Eventarc" + }, + "shortName": "GetProvider" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.eventarc_v1.types.GetProviderRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.eventarc_v1.types.Provider", + "shortName": "get_provider" + }, + "description": "Sample for GetProvider", + "file": "eventarc_v1_generated_eventarc_get_provider_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "eventarc_v1_generated_Eventarc_GetProvider_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "eventarc_v1_generated_eventarc_get_provider_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.eventarc_v1.EventarcClient", + "shortName": "EventarcClient" + }, + "fullName": "google.cloud.eventarc_v1.EventarcClient.get_provider", + "method": { + "fullName": "google.cloud.eventarc.v1.Eventarc.GetProvider", + "service": { + "fullName": "google.cloud.eventarc.v1.Eventarc", + "shortName": "Eventarc" + }, + "shortName": "GetProvider" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.eventarc_v1.types.GetProviderRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.eventarc_v1.types.Provider", + "shortName": "get_provider" + }, + "description": "Sample for GetProvider", + "file": "eventarc_v1_generated_eventarc_get_provider_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "eventarc_v1_generated_Eventarc_GetProvider_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, 
+ "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "eventarc_v1_generated_eventarc_get_provider_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.eventarc_v1.EventarcAsyncClient", + "shortName": "EventarcAsyncClient" + }, + "fullName": "google.cloud.eventarc_v1.EventarcAsyncClient.get_trigger", + "method": { + "fullName": "google.cloud.eventarc.v1.Eventarc.GetTrigger", + "service": { + "fullName": "google.cloud.eventarc.v1.Eventarc", + "shortName": "Eventarc" + }, + "shortName": "GetTrigger" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.eventarc_v1.types.GetTriggerRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.eventarc_v1.types.Trigger", + "shortName": "get_trigger" + }, + "description": "Sample for GetTrigger", + "file": "eventarc_v1_generated_eventarc_get_trigger_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "eventarc_v1_generated_Eventarc_GetTrigger_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "eventarc_v1_generated_eventarc_get_trigger_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.eventarc_v1.EventarcClient", + "shortName": "EventarcClient" + }, + "fullName": "google.cloud.eventarc_v1.EventarcClient.get_trigger", + "method": { + "fullName": "google.cloud.eventarc.v1.Eventarc.GetTrigger", + "service": { + "fullName": "google.cloud.eventarc.v1.Eventarc", + "shortName": "Eventarc" + }, + "shortName": "GetTrigger" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.eventarc_v1.types.GetTriggerRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.eventarc_v1.types.Trigger", + "shortName": "get_trigger" + }, + "description": "Sample for GetTrigger", + "file": "eventarc_v1_generated_eventarc_get_trigger_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "eventarc_v1_generated_Eventarc_GetTrigger_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "eventarc_v1_generated_eventarc_get_trigger_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.eventarc_v1.EventarcAsyncClient", + "shortName": "EventarcAsyncClient" + 
}, + "fullName": "google.cloud.eventarc_v1.EventarcAsyncClient.list_channel_connections", + "method": { + "fullName": "google.cloud.eventarc.v1.Eventarc.ListChannelConnections", + "service": { + "fullName": "google.cloud.eventarc.v1.Eventarc", + "shortName": "Eventarc" + }, + "shortName": "ListChannelConnections" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.eventarc_v1.types.ListChannelConnectionsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.eventarc_v1.services.eventarc.pagers.ListChannelConnectionsAsyncPager", + "shortName": "list_channel_connections" + }, + "description": "Sample for ListChannelConnections", + "file": "eventarc_v1_generated_eventarc_list_channel_connections_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "eventarc_v1_generated_Eventarc_ListChannelConnections_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "eventarc_v1_generated_eventarc_list_channel_connections_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.eventarc_v1.EventarcClient", + "shortName": "EventarcClient" + }, + "fullName": "google.cloud.eventarc_v1.EventarcClient.list_channel_connections", + "method": { + "fullName": "google.cloud.eventarc.v1.Eventarc.ListChannelConnections", + "service": { + "fullName": "google.cloud.eventarc.v1.Eventarc", + "shortName": "Eventarc" + }, + "shortName": "ListChannelConnections" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.eventarc_v1.types.ListChannelConnectionsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.eventarc_v1.services.eventarc.pagers.ListChannelConnectionsPager", + "shortName": "list_channel_connections" + }, + "description": "Sample for ListChannelConnections", + "file": "eventarc_v1_generated_eventarc_list_channel_connections_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "eventarc_v1_generated_Eventarc_ListChannelConnections_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "eventarc_v1_generated_eventarc_list_channel_connections_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.eventarc_v1.EventarcAsyncClient", + "shortName": "EventarcAsyncClient" + }, + "fullName": 
"google.cloud.eventarc_v1.EventarcAsyncClient.list_channels", + "method": { + "fullName": "google.cloud.eventarc.v1.Eventarc.ListChannels", + "service": { + "fullName": "google.cloud.eventarc.v1.Eventarc", + "shortName": "Eventarc" + }, + "shortName": "ListChannels" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.eventarc_v1.types.ListChannelsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.eventarc_v1.services.eventarc.pagers.ListChannelsAsyncPager", + "shortName": "list_channels" + }, + "description": "Sample for ListChannels", + "file": "eventarc_v1_generated_eventarc_list_channels_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "eventarc_v1_generated_Eventarc_ListChannels_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "eventarc_v1_generated_eventarc_list_channels_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.eventarc_v1.EventarcClient", + "shortName": "EventarcClient" + }, + "fullName": "google.cloud.eventarc_v1.EventarcClient.list_channels", + "method": { + "fullName": "google.cloud.eventarc.v1.Eventarc.ListChannels", + "service": { + "fullName": "google.cloud.eventarc.v1.Eventarc", + "shortName": "Eventarc" + }, + "shortName": "ListChannels" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.eventarc_v1.types.ListChannelsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.eventarc_v1.services.eventarc.pagers.ListChannelsPager", + "shortName": "list_channels" + }, + "description": "Sample for ListChannels", + "file": "eventarc_v1_generated_eventarc_list_channels_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "eventarc_v1_generated_Eventarc_ListChannels_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "eventarc_v1_generated_eventarc_list_channels_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.eventarc_v1.EventarcAsyncClient", + "shortName": "EventarcAsyncClient" + }, + "fullName": "google.cloud.eventarc_v1.EventarcAsyncClient.list_enrollments", + "method": { + "fullName": "google.cloud.eventarc.v1.Eventarc.ListEnrollments", + "service": { + "fullName": "google.cloud.eventarc.v1.Eventarc", + "shortName": "Eventarc" + }, + "shortName": 
"ListEnrollments" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.eventarc_v1.types.ListEnrollmentsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.eventarc_v1.services.eventarc.pagers.ListEnrollmentsAsyncPager", + "shortName": "list_enrollments" + }, + "description": "Sample for ListEnrollments", + "file": "eventarc_v1_generated_eventarc_list_enrollments_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "eventarc_v1_generated_Eventarc_ListEnrollments_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "eventarc_v1_generated_eventarc_list_enrollments_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.eventarc_v1.EventarcClient", + "shortName": "EventarcClient" + }, + "fullName": "google.cloud.eventarc_v1.EventarcClient.list_enrollments", + "method": { + "fullName": "google.cloud.eventarc.v1.Eventarc.ListEnrollments", + "service": { + "fullName": "google.cloud.eventarc.v1.Eventarc", + "shortName": "Eventarc" + }, + "shortName": "ListEnrollments" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.eventarc_v1.types.ListEnrollmentsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.eventarc_v1.services.eventarc.pagers.ListEnrollmentsPager", + "shortName": "list_enrollments" + }, + "description": "Sample for ListEnrollments", + "file": "eventarc_v1_generated_eventarc_list_enrollments_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "eventarc_v1_generated_Eventarc_ListEnrollments_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "eventarc_v1_generated_eventarc_list_enrollments_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.eventarc_v1.EventarcAsyncClient", + "shortName": "EventarcAsyncClient" + }, + "fullName": "google.cloud.eventarc_v1.EventarcAsyncClient.list_google_api_sources", + "method": { + "fullName": "google.cloud.eventarc.v1.Eventarc.ListGoogleApiSources", + "service": { + "fullName": "google.cloud.eventarc.v1.Eventarc", + "shortName": "Eventarc" + }, + "shortName": "ListGoogleApiSources" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.eventarc_v1.types.ListGoogleApiSourcesRequest" + }, + { + "name": "parent", + "type": "str" + }, + 
{ + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.eventarc_v1.services.eventarc.pagers.ListGoogleApiSourcesAsyncPager", + "shortName": "list_google_api_sources" + }, + "description": "Sample for ListGoogleApiSources", + "file": "eventarc_v1_generated_eventarc_list_google_api_sources_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "eventarc_v1_generated_Eventarc_ListGoogleApiSources_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "eventarc_v1_generated_eventarc_list_google_api_sources_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.eventarc_v1.EventarcClient", + "shortName": "EventarcClient" + }, + "fullName": "google.cloud.eventarc_v1.EventarcClient.list_google_api_sources", + "method": { + "fullName": "google.cloud.eventarc.v1.Eventarc.ListGoogleApiSources", + "service": { + "fullName": "google.cloud.eventarc.v1.Eventarc", + "shortName": "Eventarc" + }, + "shortName": "ListGoogleApiSources" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.eventarc_v1.types.ListGoogleApiSourcesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.eventarc_v1.services.eventarc.pagers.ListGoogleApiSourcesPager", + "shortName": "list_google_api_sources" + }, + "description": "Sample for ListGoogleApiSources", + "file": "eventarc_v1_generated_eventarc_list_google_api_sources_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "eventarc_v1_generated_Eventarc_ListGoogleApiSources_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "eventarc_v1_generated_eventarc_list_google_api_sources_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.eventarc_v1.EventarcAsyncClient", + "shortName": "EventarcAsyncClient" + }, + "fullName": "google.cloud.eventarc_v1.EventarcAsyncClient.list_message_bus_enrollments", + "method": { + "fullName": "google.cloud.eventarc.v1.Eventarc.ListMessageBusEnrollments", + "service": { + "fullName": "google.cloud.eventarc.v1.Eventarc", + "shortName": "Eventarc" + }, + "shortName": "ListMessageBusEnrollments" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.eventarc_v1.types.ListMessageBusEnrollmentsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { 
+ "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.eventarc_v1.services.eventarc.pagers.ListMessageBusEnrollmentsAsyncPager", + "shortName": "list_message_bus_enrollments" + }, + "description": "Sample for ListMessageBusEnrollments", + "file": "eventarc_v1_generated_eventarc_list_message_bus_enrollments_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "eventarc_v1_generated_Eventarc_ListMessageBusEnrollments_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "eventarc_v1_generated_eventarc_list_message_bus_enrollments_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.eventarc_v1.EventarcClient", + "shortName": "EventarcClient" + }, + "fullName": "google.cloud.eventarc_v1.EventarcClient.list_message_bus_enrollments", + "method": { + "fullName": "google.cloud.eventarc.v1.Eventarc.ListMessageBusEnrollments", + "service": { + "fullName": "google.cloud.eventarc.v1.Eventarc", + "shortName": "Eventarc" + }, + "shortName": "ListMessageBusEnrollments" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.eventarc_v1.types.ListMessageBusEnrollmentsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.eventarc_v1.services.eventarc.pagers.ListMessageBusEnrollmentsPager", + "shortName": "list_message_bus_enrollments" + }, + "description": "Sample for ListMessageBusEnrollments", + "file": "eventarc_v1_generated_eventarc_list_message_bus_enrollments_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "eventarc_v1_generated_Eventarc_ListMessageBusEnrollments_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "eventarc_v1_generated_eventarc_list_message_bus_enrollments_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.eventarc_v1.EventarcAsyncClient", + "shortName": "EventarcAsyncClient" + }, + "fullName": "google.cloud.eventarc_v1.EventarcAsyncClient.list_message_buses", + "method": { + "fullName": "google.cloud.eventarc.v1.Eventarc.ListMessageBuses", + "service": { + "fullName": "google.cloud.eventarc.v1.Eventarc", + "shortName": "Eventarc" + }, + "shortName": "ListMessageBuses" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.eventarc_v1.types.ListMessageBusesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + 
"type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.eventarc_v1.services.eventarc.pagers.ListMessageBusesAsyncPager", + "shortName": "list_message_buses" + }, + "description": "Sample for ListMessageBuses", + "file": "eventarc_v1_generated_eventarc_list_message_buses_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "eventarc_v1_generated_Eventarc_ListMessageBuses_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "eventarc_v1_generated_eventarc_list_message_buses_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.eventarc_v1.EventarcClient", + "shortName": "EventarcClient" + }, + "fullName": "google.cloud.eventarc_v1.EventarcClient.list_message_buses", + "method": { + "fullName": "google.cloud.eventarc.v1.Eventarc.ListMessageBuses", + "service": { + "fullName": "google.cloud.eventarc.v1.Eventarc", + "shortName": "Eventarc" + }, + "shortName": "ListMessageBuses" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.eventarc_v1.types.ListMessageBusesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.eventarc_v1.services.eventarc.pagers.ListMessageBusesPager", + "shortName": "list_message_buses" + }, + "description": "Sample for ListMessageBuses", + "file": "eventarc_v1_generated_eventarc_list_message_buses_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "eventarc_v1_generated_Eventarc_ListMessageBuses_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "eventarc_v1_generated_eventarc_list_message_buses_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.eventarc_v1.EventarcAsyncClient", + "shortName": "EventarcAsyncClient" + }, + "fullName": "google.cloud.eventarc_v1.EventarcAsyncClient.list_pipelines", + "method": { + "fullName": "google.cloud.eventarc.v1.Eventarc.ListPipelines", + "service": { + "fullName": "google.cloud.eventarc.v1.Eventarc", + "shortName": "Eventarc" + }, + "shortName": "ListPipelines" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.eventarc_v1.types.ListPipelinesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": 
"google.cloud.eventarc_v1.services.eventarc.pagers.ListPipelinesAsyncPager", + "shortName": "list_pipelines" + }, + "description": "Sample for ListPipelines", + "file": "eventarc_v1_generated_eventarc_list_pipelines_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "eventarc_v1_generated_Eventarc_ListPipelines_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "eventarc_v1_generated_eventarc_list_pipelines_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.eventarc_v1.EventarcClient", + "shortName": "EventarcClient" + }, + "fullName": "google.cloud.eventarc_v1.EventarcClient.list_pipelines", + "method": { + "fullName": "google.cloud.eventarc.v1.Eventarc.ListPipelines", + "service": { + "fullName": "google.cloud.eventarc.v1.Eventarc", + "shortName": "Eventarc" + }, + "shortName": "ListPipelines" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.eventarc_v1.types.ListPipelinesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.eventarc_v1.services.eventarc.pagers.ListPipelinesPager", + "shortName": "list_pipelines" + }, + "description": "Sample for ListPipelines", + "file": "eventarc_v1_generated_eventarc_list_pipelines_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "eventarc_v1_generated_Eventarc_ListPipelines_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "eventarc_v1_generated_eventarc_list_pipelines_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.eventarc_v1.EventarcAsyncClient", + "shortName": "EventarcAsyncClient" + }, + "fullName": "google.cloud.eventarc_v1.EventarcAsyncClient.list_providers", + "method": { + "fullName": "google.cloud.eventarc.v1.Eventarc.ListProviders", + "service": { + "fullName": "google.cloud.eventarc.v1.Eventarc", + "shortName": "Eventarc" + }, + "shortName": "ListProviders" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.eventarc_v1.types.ListProvidersRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.eventarc_v1.services.eventarc.pagers.ListProvidersAsyncPager", + "shortName": "list_providers" + }, + "description": "Sample for ListProviders", + "file": "eventarc_v1_generated_eventarc_list_providers_async.py", + "language": 
"PYTHON", + "origin": "API_DEFINITION", + "regionTag": "eventarc_v1_generated_Eventarc_ListProviders_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "eventarc_v1_generated_eventarc_list_providers_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.eventarc_v1.EventarcClient", + "shortName": "EventarcClient" + }, + "fullName": "google.cloud.eventarc_v1.EventarcClient.list_providers", + "method": { + "fullName": "google.cloud.eventarc.v1.Eventarc.ListProviders", + "service": { + "fullName": "google.cloud.eventarc.v1.Eventarc", + "shortName": "Eventarc" + }, + "shortName": "ListProviders" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.eventarc_v1.types.ListProvidersRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.eventarc_v1.services.eventarc.pagers.ListProvidersPager", + "shortName": "list_providers" + }, + "description": "Sample for ListProviders", + "file": "eventarc_v1_generated_eventarc_list_providers_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "eventarc_v1_generated_Eventarc_ListProviders_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "eventarc_v1_generated_eventarc_list_providers_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.eventarc_v1.EventarcAsyncClient", + "shortName": "EventarcAsyncClient" + }, + "fullName": "google.cloud.eventarc_v1.EventarcAsyncClient.list_triggers", + "method": { + "fullName": "google.cloud.eventarc.v1.Eventarc.ListTriggers", + "service": { + "fullName": "google.cloud.eventarc.v1.Eventarc", + "shortName": "Eventarc" + }, + "shortName": "ListTriggers" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.eventarc_v1.types.ListTriggersRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" }, { "name": "timeout", @@ -1877,14 +5193,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.eventarc_v1.services.eventarc.pagers.ListChannelConnectionsAsyncPager", - "shortName": "list_channel_connections" + "resultType": "google.cloud.eventarc_v1.services.eventarc.pagers.ListTriggersAsyncPager", + "shortName": "list_triggers" }, - "description": "Sample for ListChannelConnections", - "file": "eventarc_v1_generated_eventarc_list_channel_connections_async.py", + "description": "Sample for ListTriggers", + "file": "eventarc_v1_generated_eventarc_list_triggers_async.py", "language": "PYTHON", "origin": 
"API_DEFINITION", - "regionTag": "eventarc_v1_generated_Eventarc_ListChannelConnections_async", + "regionTag": "eventarc_v1_generated_Eventarc_ListTriggers_async", "segments": [ { "end": 52, @@ -1917,7 +5233,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "eventarc_v1_generated_eventarc_list_channel_connections_async.py" + "title": "eventarc_v1_generated_eventarc_list_triggers_async.py" }, { "canonical": true, @@ -1926,19 +5242,19 @@ "fullName": "google.cloud.eventarc_v1.EventarcClient", "shortName": "EventarcClient" }, - "fullName": "google.cloud.eventarc_v1.EventarcClient.list_channel_connections", + "fullName": "google.cloud.eventarc_v1.EventarcClient.list_triggers", "method": { - "fullName": "google.cloud.eventarc.v1.Eventarc.ListChannelConnections", + "fullName": "google.cloud.eventarc.v1.Eventarc.ListTriggers", "service": { "fullName": "google.cloud.eventarc.v1.Eventarc", "shortName": "Eventarc" }, - "shortName": "ListChannelConnections" + "shortName": "ListTriggers" }, "parameters": [ { "name": "request", - "type": "google.cloud.eventarc_v1.types.ListChannelConnectionsRequest" + "type": "google.cloud.eventarc_v1.types.ListTriggersRequest" }, { "name": "parent", @@ -1957,14 +5273,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.eventarc_v1.services.eventarc.pagers.ListChannelConnectionsPager", - "shortName": "list_channel_connections" + "resultType": "google.cloud.eventarc_v1.services.eventarc.pagers.ListTriggersPager", + "shortName": "list_triggers" }, - "description": "Sample for ListChannelConnections", - "file": "eventarc_v1_generated_eventarc_list_channel_connections_sync.py", + "description": "Sample for ListTriggers", + "file": "eventarc_v1_generated_eventarc_list_triggers_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "eventarc_v1_generated_Eventarc_ListChannelConnections_sync", + "regionTag": "eventarc_v1_generated_Eventarc_ListTriggers_sync", "segments": [ { "end": 52, @@ -1997,7 +5313,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "eventarc_v1_generated_eventarc_list_channel_connections_sync.py" + "title": "eventarc_v1_generated_eventarc_list_triggers_sync.py" }, { "canonical": true, @@ -2007,23 +5323,27 @@ "fullName": "google.cloud.eventarc_v1.EventarcAsyncClient", "shortName": "EventarcAsyncClient" }, - "fullName": "google.cloud.eventarc_v1.EventarcAsyncClient.list_channels", + "fullName": "google.cloud.eventarc_v1.EventarcAsyncClient.update_channel", "method": { - "fullName": "google.cloud.eventarc.v1.Eventarc.ListChannels", + "fullName": "google.cloud.eventarc.v1.Eventarc.UpdateChannel", "service": { "fullName": "google.cloud.eventarc.v1.Eventarc", "shortName": "Eventarc" }, - "shortName": "ListChannels" + "shortName": "UpdateChannel" }, "parameters": [ { "name": "request", - "type": "google.cloud.eventarc_v1.types.ListChannelsRequest" + "type": "google.cloud.eventarc_v1.types.UpdateChannelRequest" }, { - "name": "parent", - "type": "str" + "name": "channel", + "type": "google.cloud.eventarc_v1.types.Channel" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" }, { "name": "retry", @@ -2038,22 +5358,22 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.eventarc_v1.services.eventarc.pagers.ListChannelsAsyncPager", - "shortName": "list_channels" + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "update_channel" }, - "description": "Sample for ListChannels", - "file": 
"eventarc_v1_generated_eventarc_list_channels_async.py", + "description": "Sample for UpdateChannel", + "file": "eventarc_v1_generated_eventarc_update_channel_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "eventarc_v1_generated_Eventarc_ListChannels_async", + "regionTag": "eventarc_v1_generated_Eventarc_UpdateChannel_async", "segments": [ { - "end": 52, + "end": 54, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 54, "start": 27, "type": "SHORT" }, @@ -2063,22 +5383,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 44, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 51, + "start": 45, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 49, + "end": 55, + "start": 52, "type": "RESPONSE_HANDLING" } ], - "title": "eventarc_v1_generated_eventarc_list_channels_async.py" + "title": "eventarc_v1_generated_eventarc_update_channel_async.py" }, { "canonical": true, @@ -2087,23 +5407,27 @@ "fullName": "google.cloud.eventarc_v1.EventarcClient", "shortName": "EventarcClient" }, - "fullName": "google.cloud.eventarc_v1.EventarcClient.list_channels", + "fullName": "google.cloud.eventarc_v1.EventarcClient.update_channel", "method": { - "fullName": "google.cloud.eventarc.v1.Eventarc.ListChannels", + "fullName": "google.cloud.eventarc.v1.Eventarc.UpdateChannel", "service": { "fullName": "google.cloud.eventarc.v1.Eventarc", "shortName": "Eventarc" }, - "shortName": "ListChannels" + "shortName": "UpdateChannel" }, "parameters": [ { "name": "request", - "type": "google.cloud.eventarc_v1.types.ListChannelsRequest" + "type": "google.cloud.eventarc_v1.types.UpdateChannelRequest" }, { - "name": "parent", - "type": "str" + "name": "channel", + "type": "google.cloud.eventarc_v1.types.Channel" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" }, { "name": "retry", @@ -2118,22 +5442,22 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.eventarc_v1.services.eventarc.pagers.ListChannelsPager", - "shortName": "list_channels" + "resultType": "google.api_core.operation.Operation", + "shortName": "update_channel" }, - "description": "Sample for ListChannels", - "file": "eventarc_v1_generated_eventarc_list_channels_sync.py", + "description": "Sample for UpdateChannel", + "file": "eventarc_v1_generated_eventarc_update_channel_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "eventarc_v1_generated_Eventarc_ListChannels_sync", + "regionTag": "eventarc_v1_generated_Eventarc_UpdateChannel_sync", "segments": [ { - "end": 52, + "end": 54, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 54, "start": 27, "type": "SHORT" }, @@ -2143,22 +5467,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 44, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 51, + "start": 45, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 49, + "end": 55, + "start": 52, "type": "RESPONSE_HANDLING" } ], - "title": "eventarc_v1_generated_eventarc_list_channels_sync.py" + "title": "eventarc_v1_generated_eventarc_update_channel_sync.py" }, { "canonical": true, @@ -2168,23 +5492,196 @@ "fullName": "google.cloud.eventarc_v1.EventarcAsyncClient", "shortName": "EventarcAsyncClient" }, - "fullName": "google.cloud.eventarc_v1.EventarcAsyncClient.list_providers", + "fullName": "google.cloud.eventarc_v1.EventarcAsyncClient.update_enrollment", "method": { - "fullName": 
"google.cloud.eventarc.v1.Eventarc.ListProviders", + "fullName": "google.cloud.eventarc.v1.Eventarc.UpdateEnrollment", "service": { "fullName": "google.cloud.eventarc.v1.Eventarc", "shortName": "Eventarc" }, - "shortName": "ListProviders" + "shortName": "UpdateEnrollment" }, "parameters": [ { "name": "request", - "type": "google.cloud.eventarc_v1.types.ListProvidersRequest" + "type": "google.cloud.eventarc_v1.types.UpdateEnrollmentRequest" + }, + { + "name": "enrollment", + "type": "google.cloud.eventarc_v1.types.Enrollment" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "update_enrollment" + }, + "description": "Sample for UpdateEnrollment", + "file": "eventarc_v1_generated_eventarc_update_enrollment_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "eventarc_v1_generated_Eventarc_UpdateEnrollment_async", + "segments": [ + { + "end": 60, + "start": 27, + "type": "FULL" + }, + { + "end": 60, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 57, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 61, + "start": 58, + "type": "RESPONSE_HANDLING" + } + ], + "title": "eventarc_v1_generated_eventarc_update_enrollment_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.eventarc_v1.EventarcClient", + "shortName": "EventarcClient" + }, + "fullName": "google.cloud.eventarc_v1.EventarcClient.update_enrollment", + "method": { + "fullName": "google.cloud.eventarc.v1.Eventarc.UpdateEnrollment", + "service": { + "fullName": "google.cloud.eventarc.v1.Eventarc", + "shortName": "Eventarc" + }, + "shortName": "UpdateEnrollment" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.eventarc_v1.types.UpdateEnrollmentRequest" + }, + { + "name": "enrollment", + "type": "google.cloud.eventarc_v1.types.Enrollment" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "update_enrollment" + }, + "description": "Sample for UpdateEnrollment", + "file": "eventarc_v1_generated_eventarc_update_enrollment_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "eventarc_v1_generated_Eventarc_UpdateEnrollment_sync", + "segments": [ + { + "end": 60, + "start": 27, + "type": "FULL" + }, + { + "end": 60, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 57, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 61, + "start": 58, + "type": "RESPONSE_HANDLING" + } + ], + "title": "eventarc_v1_generated_eventarc_update_enrollment_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.eventarc_v1.EventarcAsyncClient", 
+ "shortName": "EventarcAsyncClient" + }, + "fullName": "google.cloud.eventarc_v1.EventarcAsyncClient.update_google_api_source", + "method": { + "fullName": "google.cloud.eventarc.v1.Eventarc.UpdateGoogleApiSource", + "service": { + "fullName": "google.cloud.eventarc.v1.Eventarc", + "shortName": "Eventarc" + }, + "shortName": "UpdateGoogleApiSource" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.eventarc_v1.types.UpdateGoogleApiSourceRequest" + }, + { + "name": "google_api_source", + "type": "google.cloud.eventarc_v1.types.GoogleApiSource" }, { - "name": "parent", - "type": "str" + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" }, { "name": "retry", @@ -2199,22 +5696,22 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.eventarc_v1.services.eventarc.pagers.ListProvidersAsyncPager", - "shortName": "list_providers" + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "update_google_api_source" }, - "description": "Sample for ListProviders", - "file": "eventarc_v1_generated_eventarc_list_providers_async.py", + "description": "Sample for UpdateGoogleApiSource", + "file": "eventarc_v1_generated_eventarc_update_google_api_source_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "eventarc_v1_generated_Eventarc_ListProviders_async", + "regionTag": "eventarc_v1_generated_Eventarc_UpdateGoogleApiSource_async", "segments": [ { - "end": 52, + "end": 58, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 58, "start": 27, "type": "SHORT" }, @@ -2224,22 +5721,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 48, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 55, + "start": 49, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 49, + "end": 59, + "start": 56, "type": "RESPONSE_HANDLING" } ], - "title": "eventarc_v1_generated_eventarc_list_providers_async.py" + "title": "eventarc_v1_generated_eventarc_update_google_api_source_async.py" }, { "canonical": true, @@ -2248,23 +5745,27 @@ "fullName": "google.cloud.eventarc_v1.EventarcClient", "shortName": "EventarcClient" }, - "fullName": "google.cloud.eventarc_v1.EventarcClient.list_providers", + "fullName": "google.cloud.eventarc_v1.EventarcClient.update_google_api_source", "method": { - "fullName": "google.cloud.eventarc.v1.Eventarc.ListProviders", + "fullName": "google.cloud.eventarc.v1.Eventarc.UpdateGoogleApiSource", "service": { "fullName": "google.cloud.eventarc.v1.Eventarc", "shortName": "Eventarc" }, - "shortName": "ListProviders" + "shortName": "UpdateGoogleApiSource" }, "parameters": [ { "name": "request", - "type": "google.cloud.eventarc_v1.types.ListProvidersRequest" + "type": "google.cloud.eventarc_v1.types.UpdateGoogleApiSourceRequest" }, { - "name": "parent", - "type": "str" + "name": "google_api_source", + "type": "google.cloud.eventarc_v1.types.GoogleApiSource" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" }, { "name": "retry", @@ -2279,22 +5780,22 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.eventarc_v1.services.eventarc.pagers.ListProvidersPager", - "shortName": "list_providers" + "resultType": "google.api_core.operation.Operation", + "shortName": "update_google_api_source" }, - "description": "Sample for ListProviders", - "file": "eventarc_v1_generated_eventarc_list_providers_sync.py", + "description": "Sample for UpdateGoogleApiSource", + "file": 
"eventarc_v1_generated_eventarc_update_google_api_source_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "eventarc_v1_generated_Eventarc_ListProviders_sync", + "regionTag": "eventarc_v1_generated_Eventarc_UpdateGoogleApiSource_sync", "segments": [ { - "end": 52, + "end": 58, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 58, "start": 27, "type": "SHORT" }, @@ -2304,22 +5805,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 48, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 55, + "start": 49, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 49, + "end": 59, + "start": 56, "type": "RESPONSE_HANDLING" } ], - "title": "eventarc_v1_generated_eventarc_list_providers_sync.py" + "title": "eventarc_v1_generated_eventarc_update_google_api_source_sync.py" }, { "canonical": true, @@ -2329,23 +5830,27 @@ "fullName": "google.cloud.eventarc_v1.EventarcAsyncClient", "shortName": "EventarcAsyncClient" }, - "fullName": "google.cloud.eventarc_v1.EventarcAsyncClient.list_triggers", + "fullName": "google.cloud.eventarc_v1.EventarcAsyncClient.update_google_channel_config", "method": { - "fullName": "google.cloud.eventarc.v1.Eventarc.ListTriggers", + "fullName": "google.cloud.eventarc.v1.Eventarc.UpdateGoogleChannelConfig", "service": { "fullName": "google.cloud.eventarc.v1.Eventarc", "shortName": "Eventarc" }, - "shortName": "ListTriggers" + "shortName": "UpdateGoogleChannelConfig" }, "parameters": [ { "name": "request", - "type": "google.cloud.eventarc_v1.types.ListTriggersRequest" + "type": "google.cloud.eventarc_v1.types.UpdateGoogleChannelConfigRequest" }, { - "name": "parent", - "type": "str" + "name": "google_channel_config", + "type": "google.cloud.eventarc_v1.types.GoogleChannelConfig" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" }, { "name": "retry", @@ -2360,22 +5865,22 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.eventarc_v1.services.eventarc.pagers.ListTriggersAsyncPager", - "shortName": "list_triggers" + "resultType": "google.cloud.eventarc_v1.types.GoogleChannelConfig", + "shortName": "update_google_channel_config" }, - "description": "Sample for ListTriggers", - "file": "eventarc_v1_generated_eventarc_list_triggers_async.py", + "description": "Sample for UpdateGoogleChannelConfig", + "file": "eventarc_v1_generated_eventarc_update_google_channel_config_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "eventarc_v1_generated_Eventarc_ListTriggers_async", + "regionTag": "eventarc_v1_generated_Eventarc_UpdateGoogleChannelConfig_async", "segments": [ { - "end": 52, + "end": 54, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 54, "start": 27, "type": "SHORT" }, @@ -2385,22 +5890,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 48, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 51, + "start": 49, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 49, + "end": 55, + "start": 52, "type": "RESPONSE_HANDLING" } ], - "title": "eventarc_v1_generated_eventarc_list_triggers_async.py" + "title": "eventarc_v1_generated_eventarc_update_google_channel_config_async.py" }, { "canonical": true, @@ -2409,23 +5914,27 @@ "fullName": "google.cloud.eventarc_v1.EventarcClient", "shortName": "EventarcClient" }, - "fullName": "google.cloud.eventarc_v1.EventarcClient.list_triggers", + "fullName": 
"google.cloud.eventarc_v1.EventarcClient.update_google_channel_config", "method": { - "fullName": "google.cloud.eventarc.v1.Eventarc.ListTriggers", + "fullName": "google.cloud.eventarc.v1.Eventarc.UpdateGoogleChannelConfig", "service": { "fullName": "google.cloud.eventarc.v1.Eventarc", "shortName": "Eventarc" }, - "shortName": "ListTriggers" + "shortName": "UpdateGoogleChannelConfig" }, "parameters": [ { "name": "request", - "type": "google.cloud.eventarc_v1.types.ListTriggersRequest" + "type": "google.cloud.eventarc_v1.types.UpdateGoogleChannelConfigRequest" }, { - "name": "parent", - "type": "str" + "name": "google_channel_config", + "type": "google.cloud.eventarc_v1.types.GoogleChannelConfig" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" }, { "name": "retry", @@ -2440,22 +5949,22 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.eventarc_v1.services.eventarc.pagers.ListTriggersPager", - "shortName": "list_triggers" + "resultType": "google.cloud.eventarc_v1.types.GoogleChannelConfig", + "shortName": "update_google_channel_config" }, - "description": "Sample for ListTriggers", - "file": "eventarc_v1_generated_eventarc_list_triggers_sync.py", + "description": "Sample for UpdateGoogleChannelConfig", + "file": "eventarc_v1_generated_eventarc_update_google_channel_config_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "eventarc_v1_generated_Eventarc_ListTriggers_sync", + "regionTag": "eventarc_v1_generated_Eventarc_UpdateGoogleChannelConfig_sync", "segments": [ { - "end": 52, + "end": 54, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 54, "start": 27, "type": "SHORT" }, @@ -2465,22 +5974,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 48, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 51, + "start": 49, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 49, + "end": 55, + "start": 52, "type": "RESPONSE_HANDLING" } ], - "title": "eventarc_v1_generated_eventarc_list_triggers_sync.py" + "title": "eventarc_v1_generated_eventarc_update_google_channel_config_sync.py" }, { "canonical": true, @@ -2490,23 +5999,23 @@ "fullName": "google.cloud.eventarc_v1.EventarcAsyncClient", "shortName": "EventarcAsyncClient" }, - "fullName": "google.cloud.eventarc_v1.EventarcAsyncClient.update_channel", + "fullName": "google.cloud.eventarc_v1.EventarcAsyncClient.update_message_bus", "method": { - "fullName": "google.cloud.eventarc.v1.Eventarc.UpdateChannel", + "fullName": "google.cloud.eventarc.v1.Eventarc.UpdateMessageBus", "service": { "fullName": "google.cloud.eventarc.v1.Eventarc", "shortName": "Eventarc" }, - "shortName": "UpdateChannel" + "shortName": "UpdateMessageBus" }, "parameters": [ { "name": "request", - "type": "google.cloud.eventarc_v1.types.UpdateChannelRequest" + "type": "google.cloud.eventarc_v1.types.UpdateMessageBusRequest" }, { - "name": "channel", - "type": "google.cloud.eventarc_v1.types.Channel" + "name": "message_bus", + "type": "google.cloud.eventarc_v1.types.MessageBus" }, { "name": "update_mask", @@ -2526,21 +6035,21 @@ } ], "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "update_channel" + "shortName": "update_message_bus" }, - "description": "Sample for UpdateChannel", - "file": "eventarc_v1_generated_eventarc_update_channel_async.py", + "description": "Sample for UpdateMessageBus", + "file": "eventarc_v1_generated_eventarc_update_message_bus_async.py", "language": "PYTHON", 
"origin": "API_DEFINITION", - "regionTag": "eventarc_v1_generated_Eventarc_UpdateChannel_async", + "regionTag": "eventarc_v1_generated_Eventarc_UpdateMessageBus_async", "segments": [ { - "end": 55, + "end": 54, "start": 27, "type": "FULL" }, { - "end": 55, + "end": 54, "start": 27, "type": "SHORT" }, @@ -2550,22 +6059,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 44, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 52, - "start": 46, + "end": 51, + "start": 45, "type": "REQUEST_EXECUTION" }, { - "end": 56, - "start": 53, + "end": 55, + "start": 52, "type": "RESPONSE_HANDLING" } ], - "title": "eventarc_v1_generated_eventarc_update_channel_async.py" + "title": "eventarc_v1_generated_eventarc_update_message_bus_async.py" }, { "canonical": true, @@ -2574,23 +6083,23 @@ "fullName": "google.cloud.eventarc_v1.EventarcClient", "shortName": "EventarcClient" }, - "fullName": "google.cloud.eventarc_v1.EventarcClient.update_channel", + "fullName": "google.cloud.eventarc_v1.EventarcClient.update_message_bus", "method": { - "fullName": "google.cloud.eventarc.v1.Eventarc.UpdateChannel", + "fullName": "google.cloud.eventarc.v1.Eventarc.UpdateMessageBus", "service": { "fullName": "google.cloud.eventarc.v1.Eventarc", "shortName": "Eventarc" }, - "shortName": "UpdateChannel" + "shortName": "UpdateMessageBus" }, "parameters": [ { "name": "request", - "type": "google.cloud.eventarc_v1.types.UpdateChannelRequest" + "type": "google.cloud.eventarc_v1.types.UpdateMessageBusRequest" }, { - "name": "channel", - "type": "google.cloud.eventarc_v1.types.Channel" + "name": "message_bus", + "type": "google.cloud.eventarc_v1.types.MessageBus" }, { "name": "update_mask", @@ -2610,21 +6119,21 @@ } ], "resultType": "google.api_core.operation.Operation", - "shortName": "update_channel" + "shortName": "update_message_bus" }, - "description": "Sample for UpdateChannel", - "file": "eventarc_v1_generated_eventarc_update_channel_sync.py", + "description": "Sample for UpdateMessageBus", + "file": "eventarc_v1_generated_eventarc_update_message_bus_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "eventarc_v1_generated_Eventarc_UpdateChannel_sync", + "regionTag": "eventarc_v1_generated_Eventarc_UpdateMessageBus_sync", "segments": [ { - "end": 55, + "end": 54, "start": 27, "type": "FULL" }, { - "end": 55, + "end": 54, "start": 27, "type": "SHORT" }, @@ -2634,22 +6143,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 44, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 52, - "start": 46, + "end": 51, + "start": 45, "type": "REQUEST_EXECUTION" }, { - "end": 56, - "start": 53, + "end": 55, + "start": 52, "type": "RESPONSE_HANDLING" } ], - "title": "eventarc_v1_generated_eventarc_update_channel_sync.py" + "title": "eventarc_v1_generated_eventarc_update_message_bus_sync.py" }, { "canonical": true, @@ -2659,23 +6168,23 @@ "fullName": "google.cloud.eventarc_v1.EventarcAsyncClient", "shortName": "EventarcAsyncClient" }, - "fullName": "google.cloud.eventarc_v1.EventarcAsyncClient.update_google_channel_config", + "fullName": "google.cloud.eventarc_v1.EventarcAsyncClient.update_pipeline", "method": { - "fullName": "google.cloud.eventarc.v1.Eventarc.UpdateGoogleChannelConfig", + "fullName": "google.cloud.eventarc.v1.Eventarc.UpdatePipeline", "service": { "fullName": "google.cloud.eventarc.v1.Eventarc", "shortName": "Eventarc" }, - "shortName": "UpdateGoogleChannelConfig" + "shortName": "UpdatePipeline" }, "parameters": [ { "name": "request", - "type": 
"google.cloud.eventarc_v1.types.UpdateGoogleChannelConfigRequest" + "type": "google.cloud.eventarc_v1.types.UpdatePipelineRequest" }, { - "name": "google_channel_config", - "type": "google.cloud.eventarc_v1.types.GoogleChannelConfig" + "name": "pipeline", + "type": "google.cloud.eventarc_v1.types.Pipeline" }, { "name": "update_mask", @@ -2694,22 +6203,22 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.eventarc_v1.types.GoogleChannelConfig", - "shortName": "update_google_channel_config" + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "update_pipeline" }, - "description": "Sample for UpdateGoogleChannelConfig", - "file": "eventarc_v1_generated_eventarc_update_google_channel_config_async.py", + "description": "Sample for UpdatePipeline", + "file": "eventarc_v1_generated_eventarc_update_pipeline_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "eventarc_v1_generated_Eventarc_UpdateGoogleChannelConfig_async", + "regionTag": "eventarc_v1_generated_Eventarc_UpdatePipeline_async", "segments": [ { - "end": 54, + "end": 58, "start": 27, "type": "FULL" }, { - "end": 54, + "end": 58, "start": 27, "type": "SHORT" }, @@ -2724,17 +6233,17 @@ "type": "REQUEST_INITIALIZATION" }, { - "end": 51, + "end": 55, "start": 49, "type": "REQUEST_EXECUTION" }, { - "end": 55, - "start": 52, + "end": 59, + "start": 56, "type": "RESPONSE_HANDLING" } ], - "title": "eventarc_v1_generated_eventarc_update_google_channel_config_async.py" + "title": "eventarc_v1_generated_eventarc_update_pipeline_async.py" }, { "canonical": true, @@ -2743,23 +6252,23 @@ "fullName": "google.cloud.eventarc_v1.EventarcClient", "shortName": "EventarcClient" }, - "fullName": "google.cloud.eventarc_v1.EventarcClient.update_google_channel_config", + "fullName": "google.cloud.eventarc_v1.EventarcClient.update_pipeline", "method": { - "fullName": "google.cloud.eventarc.v1.Eventarc.UpdateGoogleChannelConfig", + "fullName": "google.cloud.eventarc.v1.Eventarc.UpdatePipeline", "service": { "fullName": "google.cloud.eventarc.v1.Eventarc", "shortName": "Eventarc" }, - "shortName": "UpdateGoogleChannelConfig" + "shortName": "UpdatePipeline" }, "parameters": [ { "name": "request", - "type": "google.cloud.eventarc_v1.types.UpdateGoogleChannelConfigRequest" + "type": "google.cloud.eventarc_v1.types.UpdatePipelineRequest" }, { - "name": "google_channel_config", - "type": "google.cloud.eventarc_v1.types.GoogleChannelConfig" + "name": "pipeline", + "type": "google.cloud.eventarc_v1.types.Pipeline" }, { "name": "update_mask", @@ -2778,22 +6287,22 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.eventarc_v1.types.GoogleChannelConfig", - "shortName": "update_google_channel_config" + "resultType": "google.api_core.operation.Operation", + "shortName": "update_pipeline" }, - "description": "Sample for UpdateGoogleChannelConfig", - "file": "eventarc_v1_generated_eventarc_update_google_channel_config_sync.py", + "description": "Sample for UpdatePipeline", + "file": "eventarc_v1_generated_eventarc_update_pipeline_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "eventarc_v1_generated_Eventarc_UpdateGoogleChannelConfig_sync", + "regionTag": "eventarc_v1_generated_Eventarc_UpdatePipeline_sync", "segments": [ { - "end": 54, + "end": 58, "start": 27, "type": "FULL" }, { - "end": 54, + "end": 58, "start": 27, "type": "SHORT" }, @@ -2808,17 +6317,17 @@ "type": "REQUEST_INITIALIZATION" }, { - "end": 51, + "end": 55, 
"start": 49, "type": "REQUEST_EXECUTION" }, { - "end": 55, - "start": 52, + "end": 59, + "start": 56, "type": "RESPONSE_HANDLING" } ], - "title": "eventarc_v1_generated_eventarc_update_google_channel_config_sync.py" + "title": "eventarc_v1_generated_eventarc_update_pipeline_sync.py" }, { "canonical": true, @@ -2877,12 +6386,12 @@ "regionTag": "eventarc_v1_generated_Eventarc_UpdateTrigger_async", "segments": [ { - "end": 55, + "end": 54, "start": 27, "type": "FULL" }, { - "end": 55, + "end": 54, "start": 27, "type": "SHORT" }, @@ -2892,18 +6401,18 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 44, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 52, - "start": 46, + "end": 51, + "start": 45, "type": "REQUEST_EXECUTION" }, { - "end": 56, - "start": 53, + "end": 55, + "start": 52, "type": "RESPONSE_HANDLING" } ], @@ -2965,12 +6474,12 @@ "regionTag": "eventarc_v1_generated_Eventarc_UpdateTrigger_sync", "segments": [ { - "end": 55, + "end": 54, "start": 27, "type": "FULL" }, { - "end": 55, + "end": 54, "start": 27, "type": "SHORT" }, @@ -2980,18 +6489,18 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 44, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 52, - "start": 46, + "end": 51, + "start": 45, "type": "REQUEST_EXECUTION" }, { - "end": 56, - "start": 53, + "end": 55, + "start": 52, "type": "RESPONSE_HANDLING" } ], diff --git a/tests/integration/goldens/eventarc/scripts/fixup_eventarc_v1_keywords.py b/tests/integration/goldens/eventarc/scripts/fixup_eventarc_v1_keywords.py index e7d3db475b..693e133ecc 100755 --- a/tests/integration/goldens/eventarc/scripts/fixup_eventarc_v1_keywords.py +++ b/tests/integration/goldens/eventarc/scripts/fixup_eventarc_v1_keywords.py @@ -41,22 +41,43 @@ class eventarcCallTransformer(cst.CSTTransformer): METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { 'create_channel': ('parent', 'channel', 'channel_id', 'validate_only', ), 'create_channel_connection': ('parent', 'channel_connection', 'channel_connection_id', ), + 'create_enrollment': ('parent', 'enrollment', 'enrollment_id', 'validate_only', ), + 'create_google_api_source': ('parent', 'google_api_source', 'google_api_source_id', 'validate_only', ), + 'create_message_bus': ('parent', 'message_bus', 'message_bus_id', 'validate_only', ), + 'create_pipeline': ('parent', 'pipeline', 'pipeline_id', 'validate_only', ), 'create_trigger': ('parent', 'trigger', 'trigger_id', 'validate_only', ), 'delete_channel': ('name', 'validate_only', ), 'delete_channel_connection': ('name', ), - 'delete_trigger': ('name', 'validate_only', 'etag', 'allow_missing', ), + 'delete_enrollment': ('name', 'etag', 'allow_missing', 'validate_only', ), + 'delete_google_api_source': ('name', 'etag', 'allow_missing', 'validate_only', ), + 'delete_message_bus': ('name', 'etag', 'allow_missing', 'validate_only', ), + 'delete_pipeline': ('name', 'etag', 'allow_missing', 'validate_only', ), + 'delete_trigger': ('name', 'etag', 'allow_missing', 'validate_only', ), 'get_channel': ('name', ), 'get_channel_connection': ('name', ), + 'get_enrollment': ('name', ), + 'get_google_api_source': ('name', ), 'get_google_channel_config': ('name', ), + 'get_message_bus': ('name', ), + 'get_pipeline': ('name', ), 'get_provider': ('name', ), 'get_trigger': ('name', ), 'list_channel_connections': ('parent', 'page_size', 'page_token', ), 'list_channels': ('parent', 'page_size', 'page_token', 'order_by', ), + 'list_enrollments': ('parent', 'page_size', 'page_token', 'order_by', 'filter', ), + 'list_google_api_sources': 
('parent', 'page_size', 'page_token', 'order_by', 'filter', ), + 'list_message_bus_enrollments': ('parent', 'page_size', 'page_token', ), + 'list_message_buses': ('parent', 'page_size', 'page_token', 'order_by', 'filter', ), + 'list_pipelines': ('parent', 'page_size', 'page_token', 'order_by', 'filter', ), 'list_providers': ('parent', 'page_size', 'page_token', 'order_by', 'filter', ), 'list_triggers': ('parent', 'page_size', 'page_token', 'order_by', 'filter', ), - 'update_channel': ('validate_only', 'channel', 'update_mask', ), + 'update_channel': ('channel', 'update_mask', 'validate_only', ), + 'update_enrollment': ('enrollment', 'update_mask', 'allow_missing', 'validate_only', ), + 'update_google_api_source': ('google_api_source', 'update_mask', 'allow_missing', 'validate_only', ), 'update_google_channel_config': ('google_channel_config', 'update_mask', ), - 'update_trigger': ('validate_only', 'trigger', 'update_mask', 'allow_missing', ), + 'update_message_bus': ('message_bus', 'update_mask', 'allow_missing', 'validate_only', ), + 'update_pipeline': ('pipeline', 'update_mask', 'allow_missing', 'validate_only', ), + 'update_trigger': ('trigger', 'update_mask', 'allow_missing', 'validate_only', ), } def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: diff --git a/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py b/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py index 66ac1829f1..46f0ce10b1 100755 --- a/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py +++ b/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py @@ -64,9 +64,19 @@ from google.cloud.eventarc_v1.types import channel_connection from google.cloud.eventarc_v1.types import channel_connection as gce_channel_connection from google.cloud.eventarc_v1.types import discovery +from google.cloud.eventarc_v1.types import enrollment +from google.cloud.eventarc_v1.types import enrollment as gce_enrollment from google.cloud.eventarc_v1.types import eventarc +from google.cloud.eventarc_v1.types import google_api_source +from google.cloud.eventarc_v1.types import google_api_source as gce_google_api_source from google.cloud.eventarc_v1.types import google_channel_config from google.cloud.eventarc_v1.types import google_channel_config as gce_google_channel_config +from google.cloud.eventarc_v1.types import logging_config +from google.cloud.eventarc_v1.types import message_bus +from google.cloud.eventarc_v1.types import message_bus as gce_message_bus +from google.cloud.eventarc_v1.types import network_config +from google.cloud.eventarc_v1.types import pipeline +from google.cloud.eventarc_v1.types import pipeline as gce_pipeline from google.cloud.eventarc_v1.types import trigger from google.cloud.eventarc_v1.types import trigger as gce_trigger from google.cloud.location import locations_pb2 @@ -75,6 +85,7 @@ from google.iam.v1 import policy_pb2 # type: ignore from google.longrunning import operations_pb2 # type: ignore from google.oauth2 import service_account +from google.protobuf import duration_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore from google.rpc import code_pb2 # type: ignore @@ -807,6 +818,8 @@ def test_get_trigger(request_type, transport: str = 'grpc'): uid='uid_value', service_account='service_account_value', channel='channel_value', + event_data_content_type='event_data_content_type_value', + 
satisfies_pzs=True, etag='etag_value', ) response = client.get_trigger(request) @@ -823,6 +836,8 @@ def test_get_trigger(request_type, transport: str = 'grpc'): assert response.uid == 'uid_value' assert response.service_account == 'service_account_value' assert response.channel == 'channel_value' + assert response.event_data_content_type == 'event_data_content_type_value' + assert response.satisfies_pzs is True assert response.etag == 'etag_value' @@ -940,6 +955,8 @@ async def test_get_trigger_async(transport: str = 'grpc_asyncio', request_type=e uid='uid_value', service_account='service_account_value', channel='channel_value', + event_data_content_type='event_data_content_type_value', + satisfies_pzs=True, etag='etag_value', )) response = await client.get_trigger(request) @@ -956,6 +973,8 @@ async def test_get_trigger_async(transport: str = 'grpc_asyncio', request_type=e assert response.uid == 'uid_value' assert response.service_account == 'service_account_value' assert response.channel == 'channel_value' + assert response.event_data_content_type == 'event_data_content_type_value' + assert response.satisfies_pzs is True assert response.etag == 'etag_value' @@ -2652,6 +2671,7 @@ def test_get_channel(request_type, transport: str = 'grpc'): state=channel.Channel.State.PENDING, activation_token='activation_token_value', crypto_key_name='crypto_key_name_value', + satisfies_pzs=True, pubsub_topic='pubsub_topic_value', ) response = client.get_channel(request) @@ -2670,6 +2690,7 @@ def test_get_channel(request_type, transport: str = 'grpc'): assert response.state == channel.Channel.State.PENDING assert response.activation_token == 'activation_token_value' assert response.crypto_key_name == 'crypto_key_name_value' + assert response.satisfies_pzs is True def test_get_channel_non_empty_request_with_auto_populated_field(): @@ -2788,6 +2809,7 @@ async def test_get_channel_async(transport: str = 'grpc_asyncio', request_type=e state=channel.Channel.State.PENDING, activation_token='activation_token_value', crypto_key_name='crypto_key_name_value', + satisfies_pzs=True, )) response = await client.get_channel(request) @@ -2805,6 +2827,7 @@ async def test_get_channel_async(transport: str = 'grpc_asyncio', request_type=e assert response.state == channel.Channel.State.PENDING assert response.activation_token == 'activation_token_value' assert response.crypto_key_name == 'crypto_key_name_value' + assert response.satisfies_pzs is True @pytest.mark.asyncio @@ -7400,13 +7423,83 @@ async def test_update_google_channel_config_flattened_error_async(): ) -def test_get_trigger_rest_use_cached_wrapped_rpc(): +@pytest.mark.parametrize("request_type", [ + eventarc.GetMessageBusRequest, + dict, +]) +def test_get_message_bus(request_type, transport: str = 'grpc'): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_message_bus), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = message_bus.MessageBus( + name='name_value', + uid='uid_value', + etag='etag_value', + display_name='display_name_value', + crypto_key_name='crypto_key_name_value', + ) + response = client.get_message_bus(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = eventarc.GetMessageBusRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, message_bus.MessageBus) + assert response.name == 'name_value' + assert response.uid == 'uid_value' + assert response.etag == 'etag_value' + assert response.display_name == 'display_name_value' + assert response.crypto_key_name == 'crypto_key_name_value' + + +def test_get_message_bus_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = eventarc.GetMessageBusRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_message_bus), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.get_message_bus(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == eventarc.GetMessageBusRequest( + name='name_value', + ) + +def test_get_message_bus_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport="grpc", ) # Should wrap all calls on client creation @@ -7414,165 +7507,324 @@ def test_get_trigger_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_trigger in client._transport._wrapped_methods + assert client._transport.get_message_bus in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_trigger] = mock_rpc - + client._transport._wrapped_methods[client._transport.get_message_bus] = mock_rpc request = {} - client.get_trigger(request) + client.get_message_bus(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.get_trigger(request) + client.get_message_bus(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 +@pytest.mark.asyncio +async def test_get_message_bus_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = EventarcAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) -def test_get_trigger_rest_required_fields(request_type=eventarc.GetTriggerRequest): - transport_class = transports.EventarcRestTransport + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) + # Ensure method has been cached + assert client._client._transport.get_message_bus in client._client._transport._wrapped_methods - # verify fields with default values are dropped + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.get_message_bus] = mock_rpc - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_trigger._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) + request = {} + await client.get_message_bus(request) - # verify required fields with default values are now present + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 - jsonified_request["name"] = 'name_value' + await client.get_message_bus(request) - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_trigger._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_get_message_bus_async(transport: str = 'grpc_asyncio', request_type=eventarc.GetMessageBusRequest): + client = EventarcAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_message_bus), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(message_bus.MessageBus( + name='name_value', + uid='uid_value', + etag='etag_value', + display_name='display_name_value', + crypto_key_name='crypto_key_name_value', + )) + response = await client.get_message_bus(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = eventarc.GetMessageBusRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, message_bus.MessageBus) + assert response.name == 'name_value' + assert response.uid == 'uid_value' + assert response.etag == 'etag_value' + assert response.display_name == 'display_name_value' + assert response.crypto_key_name == 'crypto_key_name_value' - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' +@pytest.mark.asyncio +async def test_get_message_bus_async_from_dict(): + await test_get_message_bus_async(request_type=dict) + +def test_get_message_bus_field_headers(): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), - transport='rest', ) - request = request_type(**request_init) - # Designate an appropriate value for the returned response. - return_value = trigger.Trigger() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = eventarc.GetMessageBusRequest() - response_value = Response() - response_value.status_code = 200 + request.name = 'name_value' - # Convert return value to protobuf type - return_value = trigger.Trigger.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_message_bus), + '__call__') as call: + call.return_value = message_bus.MessageBus() + client.get_message_bus(request) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request - response = client.get_trigger(request) + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] - expected_params = [ - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params +@pytest.mark.asyncio +async def test_get_message_bus_field_headers_async(): + client = EventarcAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = eventarc.GetMessageBusRequest() -def test_get_trigger_rest_unset_required_fields(): - transport = transports.EventarcRestTransport(credentials=ga_credentials.AnonymousCredentials) + request.name = 'name_value' - unset_fields = transport.get_trigger._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_message_bus), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(message_bus.MessageBus()) + await client.get_message_bus(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] -def test_get_trigger_rest_flattened(): +def test_get_message_bus_flattened(): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = trigger.Trigger() - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/triggers/sample3'} - - # get truthy value for each flattened field - mock_args = dict( + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_message_bus), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = message_bus.MessageBus() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_message_bus( name='name_value', ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = trigger.Trigger.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.get_trigger(**mock_args) # Establish that the underlying call was made with the expected # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=projects/*/locations/*/triggers/*}" % client.transport._host, args[1]) + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val -def test_get_trigger_rest_flattened_error(transport: str = 'rest'): +def test_get_message_bus_flattened_error(): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_trigger( - eventarc.GetTriggerRequest(), + client.get_message_bus( + eventarc.GetMessageBusRequest(), name='name_value', ) +@pytest.mark.asyncio +async def test_get_message_bus_flattened_async(): + client = EventarcAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_message_bus), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = message_bus.MessageBus() -def test_list_triggers_rest_use_cached_wrapped_rpc(): + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(message_bus.MessageBus()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_message_bus( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_get_message_bus_flattened_error_async(): + client = EventarcAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_message_bus( + eventarc.GetMessageBusRequest(), + name='name_value', + ) + + +@pytest.mark.parametrize("request_type", [ + eventarc.ListMessageBusesRequest, + dict, +]) +def test_list_message_buses(request_type, transport: str = 'grpc'): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_message_buses), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = eventarc.ListMessageBusesResponse( + next_page_token='next_page_token_value', + unreachable=['unreachable_value'], + ) + response = client.list_message_buses(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = eventarc.ListMessageBusesRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListMessageBusesPager) + assert response.next_page_token == 'next_page_token_value' + assert response.unreachable == ['unreachable_value'] + + +def test_list_message_buses_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = eventarc.ListMessageBusesRequest( + parent='parent_value', + page_token='page_token_value', + order_by='order_by_value', + filter='filter_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_message_buses), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ client.list_message_buses(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == eventarc.ListMessageBusesRequest( + parent='parent_value', + page_token='page_token_value', + order_by='order_by_value', + filter='filter_value', + ) + +def test_list_message_buses_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport="grpc", ) # Should wrap all calls on client creation @@ -7580,423 +7832,545 @@ def test_list_triggers_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.list_triggers in client._transport._wrapped_methods + assert client._transport.list_message_buses in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_triggers] = mock_rpc - + client._transport._wrapped_methods[client._transport.list_message_buses] = mock_rpc request = {} - client.list_triggers(request) + client.list_message_buses(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.list_triggers(request) + client.list_message_buses(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 +@pytest.mark.asyncio +async def test_list_message_buses_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = EventarcAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) -def test_list_triggers_rest_required_fields(request_type=eventarc.ListTriggersRequest): - transport_class = transports.EventarcRestTransport + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) + # Ensure method has been cached + assert client._client._transport.list_message_buses in client._client._transport._wrapped_methods - # verify fields with default values are dropped + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.list_message_buses] = mock_rpc - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_triggers._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) + request = {} + await client.list_message_buses(request) - # verify required fields with default values are now present + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 - jsonified_request["parent"] = 'parent_value' + await client.list_message_buses(request) - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_triggers._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("filter", "order_by", "page_size", "page_token", )) - jsonified_request.update(unset_fields) + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' +@pytest.mark.asyncio +async def test_list_message_buses_async(transport: str = 'grpc_asyncio', request_type=eventarc.ListMessageBusesRequest): + client = EventarcAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_message_buses), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(eventarc.ListMessageBusesResponse( + next_page_token='next_page_token_value', + unreachable=['unreachable_value'], + )) + response = await client.list_message_buses(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = eventarc.ListMessageBusesRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListMessageBusesAsyncPager) + assert response.next_page_token == 'next_page_token_value' + assert response.unreachable == ['unreachable_value'] + + +@pytest.mark.asyncio +async def test_list_message_buses_async_from_dict(): + await test_list_message_buses_async(request_type=dict) +def test_list_message_buses_field_headers(): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), - transport='rest', ) - request = request_type(**request_init) - # Designate an appropriate value for the returned response. - return_value = eventarc.ListTriggersResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = eventarc.ListMessageBusesRequest() - response_value = Response() - response_value.status_code = 200 + request.parent = 'parent_value' - # Convert return value to protobuf type - return_value = eventarc.ListTriggersResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_message_buses), + '__call__') as call: + call.return_value = eventarc.ListMessageBusesResponse() + client.list_message_buses(request) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request - response = client.list_triggers(request) + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] - expected_params = [ - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params +@pytest.mark.asyncio +async def test_list_message_buses_field_headers_async(): + client = EventarcAsyncClient( + credentials=async_anonymous_credentials(), + ) -def test_list_triggers_rest_unset_required_fields(): - transport = transports.EventarcRestTransport(credentials=ga_credentials.AnonymousCredentials) + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = eventarc.ListMessageBusesRequest() - unset_fields = transport.list_triggers._get_unset_required_fields({}) - assert set(unset_fields) == (set(("filter", "orderBy", "pageSize", "pageToken", )) & set(("parent", ))) + request.parent = 'parent_value' + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_message_buses), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(eventarc.ListMessageBusesResponse()) + await client.list_message_buses(request) -def test_list_triggers_rest_flattened(): + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_list_message_buses_flattened(): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = eventarc.ListTriggersResponse() + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_message_buses), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = eventarc.ListMessageBusesResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ client.list_message_buses( + parent='parent_value', + ) - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/locations/sample2'} + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val - # get truthy value for each flattened field - mock_args = dict( + +def test_list_message_buses_flattened_error(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_message_buses( + eventarc.ListMessageBusesRequest(), parent='parent_value', ) - mock_args.update(sample_request) - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = eventarc.ListTriggersResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} +@pytest.mark.asyncio +async def test_list_message_buses_flattened_async(): + client = EventarcAsyncClient( + credentials=async_anonymous_credentials(), + ) - client.list_triggers(**mock_args) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_message_buses), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = eventarc.ListMessageBusesResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(eventarc.ListMessageBusesResponse()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_message_buses( + parent='parent_value', + ) # Establish that the underlying call was made with the expected # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/triggers" % client.transport._host, args[1]) - + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val -def test_list_triggers_rest_flattened_error(transport: str = 'rest'): - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, +@pytest.mark.asyncio +async def test_list_message_buses_flattened_error_async(): + client = EventarcAsyncClient( + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.list_triggers( - eventarc.ListTriggersRequest(), + await client.list_message_buses( + eventarc.ListMessageBusesRequest(), parent='parent_value', ) -def test_list_triggers_rest_pager(transport: str = 'rest'): +def test_list_message_buses_pager(transport_name: str = "grpc"): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport=transport_name, ) - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(Session, 'request') as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. - #with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - eventarc.ListTriggersResponse( - triggers=[ - trigger.Trigger(), - trigger.Trigger(), - trigger.Trigger(), + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_message_buses), + '__call__') as call: + # Set the response to a series of pages. + call.side_effect = ( + eventarc.ListMessageBusesResponse( + message_buses=[ + message_bus.MessageBus(), + message_bus.MessageBus(), + message_bus.MessageBus(), ], next_page_token='abc', ), - eventarc.ListTriggersResponse( - triggers=[], + eventarc.ListMessageBusesResponse( + message_buses=[], next_page_token='def', ), - eventarc.ListTriggersResponse( - triggers=[ - trigger.Trigger(), + eventarc.ListMessageBusesResponse( + message_buses=[ + message_bus.MessageBus(), ], next_page_token='ghi', ), - eventarc.ListTriggersResponse( - triggers=[ - trigger.Trigger(), - trigger.Trigger(), + eventarc.ListMessageBusesResponse( + message_buses=[ + message_bus.MessageBus(), + message_bus.MessageBus(), ], ), + RuntimeError, ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple(eventarc.ListTriggersResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode('UTF-8') - return_val.status_code = 200 - req.side_effect = return_values - sample_request = {'parent': 'projects/sample1/locations/sample2'} + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ('parent', ''), + )), + ) + pager = client.list_message_buses(request={}, retry=retry, timeout=timeout) - pager = client.list_triggers(request=sample_request) + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout results = list(pager) assert len(results) == 6 - assert all(isinstance(i, trigger.Trigger) - for i in results) + assert all(isinstance(i, message_bus.MessageBus) + for i in results) +def test_list_message_buses_pages(transport_name: str = "grpc"): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) - pages = list(client.list_triggers(request=sample_request).pages) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_message_buses), + '__call__') as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + eventarc.ListMessageBusesResponse( + message_buses=[ + message_bus.MessageBus(), + message_bus.MessageBus(), + message_bus.MessageBus(), + ], + next_page_token='abc', + ), + eventarc.ListMessageBusesResponse( + message_buses=[], + next_page_token='def', + ), + eventarc.ListMessageBusesResponse( + message_buses=[ + message_bus.MessageBus(), + ], + next_page_token='ghi', + ), + eventarc.ListMessageBusesResponse( + message_buses=[ + message_bus.MessageBus(), + message_bus.MessageBus(), + ], + ), + RuntimeError, + ) + pages = list(client.list_message_buses(request={}).pages) for page_, token in zip(pages, ['abc','def','ghi', '']): assert page_.raw_page.next_page_token == token +@pytest.mark.asyncio +async def test_list_message_buses_async_pager(): + client = EventarcAsyncClient( + credentials=async_anonymous_credentials(), + ) -def test_create_trigger_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_message_buses), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. + call.side_effect = ( + eventarc.ListMessageBusesResponse( + message_buses=[ + message_bus.MessageBus(), + message_bus.MessageBus(), + message_bus.MessageBus(), + ], + next_page_token='abc', + ), + eventarc.ListMessageBusesResponse( + message_buses=[], + next_page_token='def', + ), + eventarc.ListMessageBusesResponse( + message_buses=[ + message_bus.MessageBus(), + ], + next_page_token='ghi', + ), + eventarc.ListMessageBusesResponse( + message_buses=[ + message_bus.MessageBus(), + message_bus.MessageBus(), + ], + ), + RuntimeError, ) + async_pager = await client.list_message_buses(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() + assert len(responses) == 6 + assert all(isinstance(i, message_bus.MessageBus) + for i in responses) - # Ensure method has been cached - assert client._transport.create_trigger in client._transport._wrapped_methods - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.create_trigger] = mock_rpc +@pytest.mark.asyncio +async def test_list_message_buses_async_pages(): + client = EventarcAsyncClient( + credentials=async_anonymous_credentials(), + ) - request = {} - client.create_trigger(request) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_message_buses), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + eventarc.ListMessageBusesResponse( + message_buses=[ + message_bus.MessageBus(), + message_bus.MessageBus(), + message_bus.MessageBus(), + ], + next_page_token='abc', + ), + eventarc.ListMessageBusesResponse( + message_buses=[], + next_page_token='def', + ), + eventarc.ListMessageBusesResponse( + message_buses=[ + message_bus.MessageBus(), + ], + next_page_token='ghi', + ), + eventarc.ListMessageBusesResponse( + message_buses=[ + message_bus.MessageBus(), + message_bus.MessageBus(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_message_buses(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 +@pytest.mark.parametrize("request_type", [ + eventarc.ListMessageBusEnrollmentsRequest, + dict, +]) +def test_list_message_bus_enrollments(request_type, transport: str = 'grpc'): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() - client.create_trigger(request) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_message_bus_enrollments), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = eventarc.ListMessageBusEnrollmentsResponse( + enrollments=['enrollments_value'], + next_page_token='next_page_token_value', + unreachable=['unreachable_value'], + ) + response = client.list_message_bus_enrollments(request) - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = eventarc.ListMessageBusEnrollmentsRequest() + assert args[0] == request + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListMessageBusEnrollmentsPager) + assert response.enrollments == ['enrollments_value'] + assert response.next_page_token == 'next_page_token_value' + assert response.unreachable == ['unreachable_value'] -def test_create_trigger_rest_required_fields(request_type=eventarc.CreateTriggerRequest): - transport_class = transports.EventarcRestTransport - - request_init = {} - request_init["parent"] = "" - request_init["trigger_id"] = "" - request_init["validate_only"] = False - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - assert "triggerId" not in jsonified_request - assert "validateOnly" not in jsonified_request - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_trigger._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - assert "triggerId" in jsonified_request - assert jsonified_request["triggerId"] == request_init["trigger_id"] - assert "validateOnly" in jsonified_request - assert jsonified_request["validateOnly"] == request_init["validate_only"] - - jsonified_request["parent"] = 'parent_value' - jsonified_request["triggerId"] = 'trigger_id_value' - jsonified_request["validateOnly"] = True - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_trigger._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("trigger_id", "validate_only", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - assert "triggerId" in jsonified_request - assert jsonified_request["triggerId"] == 'trigger_id_value' - assert "validateOnly" in jsonified_request - assert jsonified_request["validateOnly"] == True +def test_list_message_bus_enrollments_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), - transport='rest', + transport='grpc', ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.create_trigger(request) - - expected_params = [ - ( - "triggerId", - "", - ), - ( - "validateOnly", - str(False).lower(), - ), - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_create_trigger_rest_unset_required_fields(): - transport = transports.EventarcRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.create_trigger._get_unset_required_fields({}) - assert set(unset_fields) == (set(("triggerId", "validateOnly", )) & set(("parent", "trigger", "triggerId", "validateOnly", ))) - -def test_create_trigger_rest_flattened(): - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = eventarc.ListMessageBusEnrollmentsRequest( + parent='parent_value', + page_token='page_token_value', ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/locations/sample2'} - - # get truthy value for each flattened field - mock_args = dict( + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_message_bus_enrollments), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ client.list_message_bus_enrollments(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == eventarc.ListMessageBusEnrollmentsRequest( parent='parent_value', - trigger=gce_trigger.Trigger(name='name_value'), - trigger_id='trigger_id_value', + page_token='page_token_value', ) - mock_args.update(sample_request) - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} +def test_list_message_bus_enrollments_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) - client.create_trigger(**mock_args) + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/triggers" % client.transport._host, args[1]) + # Ensure method has been cached + assert client._transport.list_message_bus_enrollments in client._transport._wrapped_methods + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.list_message_bus_enrollments] = mock_rpc + request = {} + client.list_message_bus_enrollments(request) -def test_create_trigger_rest_flattened_error(transport: str = 'rest'): - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.create_trigger( - eventarc.CreateTriggerRequest(), - parent='parent_value', - trigger=gce_trigger.Trigger(name='name_value'), - trigger_id='trigger_id_value', - ) + client.list_message_bus_enrollments(request) + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 -def test_update_trigger_rest_use_cached_wrapped_rpc(): +@pytest.mark.asyncio +async def test_list_message_bus_enrollments_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = EventarcAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, ) # Should wrap all calls on client creation @@ -8004,358 +8378,474 @@ def test_update_trigger_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.update_trigger in client._transport._wrapped_methods + assert client._client._transport.list_message_bus_enrollments in client._client._transport._wrapped_methods # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.update_trigger] = mock_rpc + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.list_message_bus_enrollments] = mock_rpc request = {} - client.update_trigger(request) + await client.list_message_bus_enrollments(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.update_trigger(request) + await client.list_message_bus_enrollments(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 +@pytest.mark.asyncio +async def test_list_message_bus_enrollments_async(transport: str = 'grpc_asyncio', request_type=eventarc.ListMessageBusEnrollmentsRequest): + client = EventarcAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) -def test_update_trigger_rest_required_fields(request_type=eventarc.UpdateTriggerRequest): - transport_class = transports.EventarcRestTransport - - request_init = {} - request_init["validate_only"] = False - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - assert "validateOnly" not in jsonified_request + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_trigger._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_message_bus_enrollments), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(eventarc.ListMessageBusEnrollmentsResponse( + enrollments=['enrollments_value'], + next_page_token='next_page_token_value', + unreachable=['unreachable_value'], + )) + response = await client.list_message_bus_enrollments(request) - # verify required fields with default values are now present - assert "validateOnly" in jsonified_request - assert jsonified_request["validateOnly"] == request_init["validate_only"] + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = eventarc.ListMessageBusEnrollmentsRequest() + assert args[0] == request - jsonified_request["validateOnly"] = True + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListMessageBusEnrollmentsAsyncPager) + assert response.enrollments == ['enrollments_value'] + assert response.next_page_token == 'next_page_token_value' + assert response.unreachable == ['unreachable_value'] - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_trigger._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("allow_missing", "update_mask", "validate_only", )) - jsonified_request.update(unset_fields) - # verify required fields with non-default values are left alone - assert "validateOnly" in jsonified_request - assert jsonified_request["validateOnly"] == True +@pytest.mark.asyncio +async def test_list_message_bus_enrollments_async_from_dict(): + await test_list_message_bus_enrollments_async(request_type=dict) +def test_list_message_bus_enrollments_field_headers(): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), - transport='rest', ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "patch", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = eventarc.ListMessageBusEnrollmentsRequest() - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + request.parent = 'parent_value' - response = client.update_trigger(request) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_message_bus_enrollments), + '__call__') as call: + call.return_value = eventarc.ListMessageBusEnrollmentsResponse() + client.list_message_bus_enrollments(request) - expected_params = [ - ( - "validateOnly", - str(False).lower(), - ), - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] -def test_update_trigger_rest_unset_required_fields(): - transport = transports.EventarcRestTransport(credentials=ga_credentials.AnonymousCredentials) - unset_fields = transport.update_trigger._get_unset_required_fields({}) - assert set(unset_fields) == (set(("allowMissing", "updateMask", "validateOnly", )) & set(("validateOnly", ))) +@pytest.mark.asyncio +async def test_list_message_bus_enrollments_field_headers_async(): + client = EventarcAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = eventarc.ListMessageBusEnrollmentsRequest() -def test_update_trigger_rest_flattened(): - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) + request.parent = 'parent_value' - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_message_bus_enrollments), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(eventarc.ListMessageBusEnrollmentsResponse()) + await client.list_message_bus_enrollments(request) - # get arguments that satisfy an http rule for this method - sample_request = {'trigger': {'name': 'projects/sample1/locations/sample2/triggers/sample3'}} + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request - # get truthy value for each flattened field - mock_args = dict( - trigger=gce_trigger.Trigger(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - allow_missing=True, - ) - mock_args.update(sample_request) + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.update_trigger(**mock_args) +def test_list_message_bus_enrollments_flattened(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_message_bus_enrollments), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = eventarc.ListMessageBusEnrollmentsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_message_bus_enrollments( + parent='parent_value', + ) # Establish that the underlying call was made with the expected # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{trigger.name=projects/*/locations/*/triggers/*}" % client.transport._host, args[1]) + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val -def test_update_trigger_rest_flattened_error(transport: str = 'rest'): +def test_list_message_bus_enrollments_flattened_error(): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.update_trigger( - eventarc.UpdateTriggerRequest(), - trigger=gce_trigger.Trigger(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - allow_missing=True, + client.list_message_bus_enrollments( + eventarc.ListMessageBusEnrollmentsRequest(), + parent='parent_value', ) +@pytest.mark.asyncio +async def test_list_message_bus_enrollments_flattened_async(): + client = EventarcAsyncClient( + credentials=async_anonymous_credentials(), + ) -def test_delete_trigger_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_message_bus_enrollments), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = eventarc.ListMessageBusEnrollmentsResponse() - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(eventarc.ListMessageBusEnrollmentsResponse()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ response = await client.list_message_bus_enrollments( + parent='parent_value', + ) - # Ensure method has been cached - assert client._transport.delete_trigger in client._transport._wrapped_methods + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.delete_trigger] = mock_rpc +@pytest.mark.asyncio +async def test_list_message_bus_enrollments_flattened_error_async(): + client = EventarcAsyncClient( + credentials=async_anonymous_credentials(), + ) - request = {} - client.delete_trigger(request) + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_message_bus_enrollments( + eventarc.ListMessageBusEnrollmentsRequest(), + parent='parent_value', + ) - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() +def test_list_message_bus_enrollments_pager(transport_name: str = "grpc"): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) - client.delete_trigger(request) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_message_bus_enrollments), + '__call__') as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + eventarc.ListMessageBusEnrollmentsResponse( + enrollments=[ + str(), + str(), + str(), + ], + next_page_token='abc', + ), + eventarc.ListMessageBusEnrollmentsResponse( + enrollments=[], + next_page_token='def', + ), + eventarc.ListMessageBusEnrollmentsResponse( + enrollments=[ + str(), + ], + next_page_token='ghi', + ), + eventarc.ListMessageBusEnrollmentsResponse( + enrollments=[ + str(), + str(), + ], + ), + RuntimeError, + ) - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ('parent', ''), + )), + ) + pager = client.list_message_bus_enrollments(request={}, retry=retry, timeout=timeout) + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout -def test_delete_trigger_rest_required_fields(request_type=eventarc.DeleteTriggerRequest): - transport_class = transports.EventarcRestTransport + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, str) + for i in results) +def test_list_message_bus_enrollments_pages(transport_name: str = "grpc"): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) - request_init = {} - request_init["name"] = "" - request_init["validate_only"] = False - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_message_bus_enrollments), + '__call__') as call: + # Set the response to a series of pages. + call.side_effect = ( + eventarc.ListMessageBusEnrollmentsResponse( + enrollments=[ + str(), + str(), + str(), + ], + next_page_token='abc', + ), + eventarc.ListMessageBusEnrollmentsResponse( + enrollments=[], + next_page_token='def', + ), + eventarc.ListMessageBusEnrollmentsResponse( + enrollments=[ + str(), + ], + next_page_token='ghi', + ), + eventarc.ListMessageBusEnrollmentsResponse( + enrollments=[ + str(), + str(), + ], + ), + RuntimeError, + ) + pages = list(client.list_message_bus_enrollments(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token - # verify fields with default values are dropped - assert "validateOnly" not in jsonified_request +@pytest.mark.asyncio +async def test_list_message_bus_enrollments_async_pager(): + client = EventarcAsyncClient( + credentials=async_anonymous_credentials(), + ) - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_trigger._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_message_bus_enrollments), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + eventarc.ListMessageBusEnrollmentsResponse( + enrollments=[ + str(), + str(), + str(), + ], + next_page_token='abc', + ), + eventarc.ListMessageBusEnrollmentsResponse( + enrollments=[], + next_page_token='def', + ), + eventarc.ListMessageBusEnrollmentsResponse( + enrollments=[ + str(), + ], + next_page_token='ghi', + ), + eventarc.ListMessageBusEnrollmentsResponse( + enrollments=[ + str(), + str(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_message_bus_enrollments(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) - # verify required fields with default values are now present - assert "validateOnly" in jsonified_request - assert jsonified_request["validateOnly"] == request_init["validate_only"] + assert len(responses) == 6 + assert all(isinstance(i, str) + for i in responses) - jsonified_request["name"] = 'name_value' - jsonified_request["validateOnly"] = True - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_trigger._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("allow_missing", "etag", "validate_only", )) - jsonified_request.update(unset_fields) +@pytest.mark.asyncio +async def test_list_message_bus_enrollments_async_pages(): + client = EventarcAsyncClient( + credentials=async_anonymous_credentials(), + ) - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - assert "validateOnly" in jsonified_request - assert jsonified_request["validateOnly"] == True + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_message_bus_enrollments), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. + call.side_effect = ( + eventarc.ListMessageBusEnrollmentsResponse( + enrollments=[ + str(), + str(), + str(), + ], + next_page_token='abc', + ), + eventarc.ListMessageBusEnrollmentsResponse( + enrollments=[], + next_page_token='def', + ), + eventarc.ListMessageBusEnrollmentsResponse( + enrollments=[ + str(), + ], + next_page_token='ghi', + ), + eventarc.ListMessageBusEnrollmentsResponse( + enrollments=[ + str(), + str(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_message_bus_enrollments(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token +@pytest.mark.parametrize("request_type", [ + eventarc.CreateMessageBusRequest, + dict, +]) +def test_create_message_bus(request_type, transport: str = 'grpc'): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), - transport='rest', + transport=transport, ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - # Mock the http request call within the method and fake a response. 
-    with mock.patch.object(Session, 'request') as req:
-        # We need to mock transcode() because providing default values
-        # for required fields will fail the real version if the http_options
-        # expect actual values for those fields.
-        with mock.patch.object(path_template, 'transcode') as transcode:
-            # A uri without fields and an empty body will force all the
-            # request fields to show up in the query_params.
-            pb_request = request_type.pb(request)
-            transcode_result = {
-                'uri': 'v1/sample_method',
-                'method': "delete",
-                'query_params': pb_request,
-            }
-            transcode.return_value = transcode_result
-
-            response_value = Response()
-            response_value.status_code = 200
-            json_return_value = json_format.MessageToJson(return_value)
-
-            response_value._content = json_return_value.encode('UTF-8')
-            req.return_value = response_value
-            req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-            response = client.delete_trigger(request)
-            expected_params = [
-                (
-                    "validateOnly",
-                    str(False).lower(),
-                ),
-            ]
-            actual_params = req.call_args.kwargs['params']
-            assert expected_params == actual_params
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.create_message_bus),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = operations_pb2.Operation(name='operations/spam')
+        response = client.create_message_bus(request)
-def test_delete_trigger_rest_unset_required_fields():
-    transport = transports.EventarcRestTransport(credentials=ga_credentials.AnonymousCredentials)
+    # Establish that the underlying gRPC stub method was called.
+    assert len(call.mock_calls) == 1
+    _, args, _ = call.mock_calls[0]
+    request = eventarc.CreateMessageBusRequest()
+    assert args[0] == request
-    unset_fields = transport.delete_trigger._get_unset_required_fields({})
-    assert set(unset_fields) == (set(("allowMissing", "etag", "validateOnly", )) & set(("name", "validateOnly", )))
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, future.Future)
-def test_delete_trigger_rest_flattened():
+def test_create_message_bus_non_empty_request_with_auto_populated_field():
+    # This test is a coverage failsafe to make sure that UUID4 fields are
+    # automatically populated, according to AIP-4235, with non-empty requests.
     client = EventarcClient(
         credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
+        transport='grpc',
     )
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = operations_pb2.Operation(name='operations/spam')
-
-        # get arguments that satisfy an http rule for this method
-        sample_request = {'name': 'projects/sample1/locations/sample2/triggers/sample3'}
-
-        # get truthy value for each flattened field
-        mock_args = dict(
-            name='name_value',
-            allow_missing=True,
-        )
-        mock_args.update(sample_request)
-
-        # Wrap the value into a proper Response obj
-        response_value = Response()
-        response_value.status_code = 200
-        json_return_value = json_format.MessageToJson(return_value)
-        response_value._content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-        client.delete_trigger(**mock_args)
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(req.mock_calls) == 1
-        _, args, _ = req.mock_calls[0]
-        assert path_template.validate("%s/v1/{name=projects/*/locations/*/triggers/*}" % client.transport._host, args[1])
-
-
-def test_delete_trigger_rest_flattened_error(transport: str = 'rest'):
-    client = EventarcClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
+    # Populate all string fields in the request which are not UUID4
+    # since we want to check that UUID4 are populated automatically
+    # if they meet the requirements of AIP 4235.
+    request = eventarc.CreateMessageBusRequest(
+        parent='parent_value',
+        message_bus_id='message_bus_id_value',
     )
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.delete_trigger(
-            eventarc.DeleteTriggerRequest(),
-            name='name_value',
-            allow_missing=True,
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.create_message_bus),
+            '__call__') as call:
+        call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
+        client.create_message_bus(request=request)
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == eventarc.CreateMessageBusRequest(
+            parent='parent_value',
+            message_bus_id='message_bus_id_value',
         )
-
-def test_get_channel_rest_use_cached_wrapped_rpc():
+def test_create_message_bus_use_cached_wrapped_rpc():
     # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
     # instead of constructing them on each call
     with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
         client = EventarcClient(
             credentials=ga_credentials.AnonymousCredentials(),
-            transport="rest",
+            transport="grpc",
         )
         # Should wrap all calls on client creation
@@ -8363,165 +8853,334 @@ def test_get_channel_rest_use_cached_wrapped_rpc():
         wrapper_fn.reset_mock()
         # Ensure method has been cached
-        assert client._transport.get_channel in client._transport._wrapped_methods
+        assert client._transport.create_message_bus in client._transport._wrapped_methods
         # Replace cached wrapped function with mock
         mock_rpc = mock.Mock()
         mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
-        client._transport._wrapped_methods[client._transport.get_channel] = mock_rpc
-
+        client._transport._wrapped_methods[client._transport.create_message_bus] = mock_rpc
         request = {}
-        client.get_channel(request)
+        client.create_message_bus(request)
         # Establish that the underlying gRPC stub method was called.
         assert mock_rpc.call_count == 1
-        client.get_channel(request)
+        # Operation methods call wrapper_fn to build a cached
+        # client._transport.operations_client instance on first rpc call.
+        # Subsequent calls should use the cached wrapper
+        wrapper_fn.reset_mock()
+
+        client.create_message_bus(request)
         # Establish that a new wrapper was not created for this call
         assert wrapper_fn.call_count == 0
         assert mock_rpc.call_count == 2
+@pytest.mark.asyncio
+async def test_create_message_bus_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
+        client = EventarcAsyncClient(
+            credentials=async_anonymous_credentials(),
+            transport=transport,
+        )
-def test_get_channel_rest_required_fields(request_type=eventarc.GetChannelRequest):
-    transport_class = transports.EventarcRestTransport
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
-    request_init = {}
-    request_init["name"] = ""
-    request = request_type(**request_init)
-    pb_request = request_type.pb(request)
-    jsonified_request = json.loads(json_format.MessageToJson(
-        pb_request,
-        use_integers_for_enums=False
-    ))
+        # Ensure method has been cached
+        assert client._client._transport.create_message_bus in client._client._transport._wrapped_methods
-    # verify fields with default values are dropped
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.AsyncMock()
+        mock_rpc.return_value = mock.Mock()
+        client._client._transport._wrapped_methods[client._client._transport.create_message_bus] = mock_rpc
-    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_channel._get_unset_required_fields(jsonified_request)
-    jsonified_request.update(unset_fields)
+        request = {}
+        await client.create_message_bus(request)
-    # verify required fields with default values are now present
+        # Establish that the underlying gRPC stub method was called.
+        assert mock_rpc.call_count == 1
-    jsonified_request["name"] = 'name_value'
+        # Operation methods call wrapper_fn to build a cached
+        # client._transport.operations_client instance on first rpc call.
+        # Subsequent calls should use the cached wrapper
+        wrapper_fn.reset_mock()
-    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_channel._get_unset_required_fields(jsonified_request)
-    jsonified_request.update(unset_fields)
+        await client.create_message_bus(request)
-    # verify required fields with non-default values are left alone
-    assert "name" in jsonified_request
-    assert jsonified_request["name"] == 'name_value'
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
-    client = EventarcClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='rest',
+@pytest.mark.asyncio
+async def test_create_message_bus_async(transport: str = 'grpc_asyncio', request_type=eventarc.CreateMessageBusRequest):
+    client = EventarcAsyncClient(
+        credentials=async_anonymous_credentials(),
+        transport=transport,
     )
-    request = request_type(**request_init)
-    # Designate an appropriate value for the returned response.
-    return_value = channel.Channel()
-    # Mock the http request call within the method and fake a response.
- with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() - response_value = Response() - response_value.status_code = 200 + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_message_bus), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.create_message_bus(request) - # Convert return value to protobuf type - return_value = channel.Channel.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = eventarc.CreateMessageBusRequest() + assert args[0] == request - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) - response = client.get_channel(request) - expected_params = [ - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params +@pytest.mark.asyncio +async def test_create_message_bus_async_from_dict(): + await test_create_message_bus_async(request_type=dict) +def test_create_message_bus_field_headers(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + ) -def test_get_channel_rest_unset_required_fields(): - transport = transports.EventarcRestTransport(credentials=ga_credentials.AnonymousCredentials) + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = eventarc.CreateMessageBusRequest() - unset_fields = transport.get_channel._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) + request.parent = 'parent_value' + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_message_bus), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.create_message_bus(request) -def test_get_channel_rest_flattened(): - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_create_message_bus_field_headers_async(): + client = EventarcAsyncClient( + credentials=async_anonymous_credentials(), ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = channel.Channel() + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = eventarc.CreateMessageBusRequest() - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/channels/sample3'} + request.parent = 'parent_value' - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_message_bus), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.create_message_bus(request) - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = channel.Channel.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request - client.get_channel(**mock_args) + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_create_message_bus_flattened(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_message_bus), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_message_bus( + parent='parent_value', + message_bus=gce_message_bus.MessageBus(name='name_value'), + message_bus_id='message_bus_id_value', + ) # Establish that the underlying call was made with the expected # request object values. 
- assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=projects/*/locations/*/channels/*}" % client.transport._host, args[1]) + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + arg = args[0].message_bus + mock_val = gce_message_bus.MessageBus(name='name_value') + assert arg == mock_val + arg = args[0].message_bus_id + mock_val = 'message_bus_id_value' + assert arg == mock_val -def test_get_channel_rest_flattened_error(transport: str = 'rest'): +def test_create_message_bus_flattened_error(): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_channel( - eventarc.GetChannelRequest(), - name='name_value', + client.create_message_bus( + eventarc.CreateMessageBusRequest(), + parent='parent_value', + message_bus=gce_message_bus.MessageBus(name='name_value'), + message_bus_id='message_bus_id_value', ) +@pytest.mark.asyncio +async def test_create_message_bus_flattened_async(): + client = EventarcAsyncClient( + credentials=async_anonymous_credentials(), + ) -def test_list_channels_rest_use_cached_wrapped_rpc(): + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_message_bus), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_message_bus( + parent='parent_value', + message_bus=gce_message_bus.MessageBus(name='name_value'), + message_bus_id='message_bus_id_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + arg = args[0].message_bus + mock_val = gce_message_bus.MessageBus(name='name_value') + assert arg == mock_val + arg = args[0].message_bus_id + mock_val = 'message_bus_id_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_create_message_bus_flattened_error_async(): + client = EventarcAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_message_bus( + eventarc.CreateMessageBusRequest(), + parent='parent_value', + message_bus=gce_message_bus.MessageBus(name='name_value'), + message_bus_id='message_bus_id_value', + ) + + +@pytest.mark.parametrize("request_type", [ + eventarc.UpdateMessageBusRequest, + dict, +]) +def test_update_message_bus(request_type, transport: str = 'grpc'): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.update_message_bus), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/spam') + response = client.update_message_bus(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = eventarc.UpdateMessageBusRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_update_message_bus_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = eventarc.UpdateMessageBusRequest( + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_message_bus), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.update_message_bus(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == eventarc.UpdateMessageBusRequest( + ) + +def test_update_message_bus_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport="grpc", ) # Should wrap all calls on client creation @@ -8529,229 +9188,328 @@ def test_list_channels_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.list_channels in client._transport._wrapped_methods + assert client._transport.update_message_bus in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_channels] = mock_rpc - + client._transport._wrapped_methods[client._transport.update_message_bus] = mock_rpc request = {} - client.list_channels(request) + client.update_message_bus(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.list_channels(request) + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_message_bus(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 +@pytest.mark.asyncio +async def test_update_message_bus_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = EventarcAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) -def test_list_channels_rest_required_fields(request_type=eventarc.ListChannelsRequest): - transport_class = transports.EventarcRestTransport + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) + # Ensure method has been cached + assert client._client._transport.update_message_bus in client._client._transport._wrapped_methods - # verify fields with default values are dropped + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.update_message_bus] = mock_rpc - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_channels._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) + request = {} + await client.update_message_bus(request) - # verify required fields with default values are now present + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 - jsonified_request["parent"] = 'parent_value' + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_channels._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("order_by", "page_size", "page_token", )) - jsonified_request.update(unset_fields) + await client.update_message_bus(request) - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) +@pytest.mark.asyncio +async def test_update_message_bus_async(transport: str = 'grpc_asyncio', request_type=eventarc.UpdateMessageBusRequest): + client = EventarcAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) - # Designate an appropriate value for the returned response. - return_value = eventarc.ListChannelsResponse() - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() - response_value = Response() - response_value.status_code = 200 + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_message_bus), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.update_message_bus(request) - # Convert return value to protobuf type - return_value = eventarc.ListChannelsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = eventarc.UpdateMessageBusRequest() + assert args[0] == request - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) - response = client.list_channels(request) - expected_params = [ - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params +@pytest.mark.asyncio +async def test_update_message_bus_async_from_dict(): + await test_update_message_bus_async(request_type=dict) +def test_update_message_bus_field_headers(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + ) -def test_list_channels_rest_unset_required_fields(): - transport = transports.EventarcRestTransport(credentials=ga_credentials.AnonymousCredentials) + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = eventarc.UpdateMessageBusRequest() - unset_fields = transport.list_channels._get_unset_required_fields({}) - assert set(unset_fields) == (set(("orderBy", "pageSize", "pageToken", )) & set(("parent", ))) + request.message_bus.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_message_bus), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.update_message_bus(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request -def test_list_channels_rest_flattened(): - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'message_bus.name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_update_message_bus_field_headers_async(): + client = EventarcAsyncClient( + credentials=async_anonymous_credentials(), ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = eventarc.ListChannelsResponse() + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = eventarc.UpdateMessageBusRequest() - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/locations/sample2'} + request.message_bus.name = 'name_value' - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - ) - mock_args.update(sample_request) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_message_bus), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.update_message_bus(request) - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = eventarc.ListChannelsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request - client.list_channels(**mock_args) + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'message_bus.name=name_value', + ) in kw['metadata'] + + +def test_update_message_bus_flattened(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_message_bus), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_message_bus( + message_bus=gce_message_bus.MessageBus(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) # Establish that the underlying call was made with the expected # request object values. 
- assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/channels" % client.transport._host, args[1]) + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].message_bus + mock_val = gce_message_bus.MessageBus(name='name_value') + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) + assert arg == mock_val -def test_list_channels_rest_flattened_error(transport: str = 'rest'): +def test_update_message_bus_flattened_error(): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.list_channels( - eventarc.ListChannelsRequest(), - parent='parent_value', + client.update_message_bus( + eventarc.UpdateMessageBusRequest(), + message_bus=gce_message_bus.MessageBus(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), ) +@pytest.mark.asyncio +async def test_update_message_bus_flattened_async(): + client = EventarcAsyncClient( + credentials=async_anonymous_credentials(), + ) -def test_list_channels_rest_pager(transport: str = 'rest'): + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_message_bus), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_message_bus( + message_bus=gce_message_bus.MessageBus(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].message_bus + mock_val = gce_message_bus.MessageBus(name='name_value') + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) + assert arg == mock_val + +@pytest.mark.asyncio +async def test_update_message_bus_flattened_error_async(): + client = EventarcAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_message_bus( + eventarc.UpdateMessageBusRequest(), + message_bus=gce_message_bus.MessageBus(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + +@pytest.mark.parametrize("request_type", [ + eventarc.DeleteMessageBusRequest, + dict, +]) +def test_delete_message_bus(request_type, transport: str = 'grpc'): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. 
- #with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - eventarc.ListChannelsResponse( - channels=[ - channel.Channel(), - channel.Channel(), - channel.Channel(), - ], - next_page_token='abc', - ), - eventarc.ListChannelsResponse( - channels=[], - next_page_token='def', - ), - eventarc.ListChannelsResponse( - channels=[ - channel.Channel(), - ], - next_page_token='ghi', - ), - eventarc.ListChannelsResponse( - channels=[ - channel.Channel(), - channel.Channel(), - ], - ), - ) - # Two responses for two calls - response = response + response + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() - # Wrap the values into proper Response objs - response = tuple(eventarc.ListChannelsResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode('UTF-8') - return_val.status_code = 200 - req.side_effect = return_values + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_message_bus), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/spam') + response = client.delete_message_bus(request) - sample_request = {'parent': 'projects/sample1/locations/sample2'} + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = eventarc.DeleteMessageBusRequest() + assert args[0] == request - pager = client.list_channels(request=sample_request) + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, channel.Channel) - for i in results) - pages = list(client.list_channels(request=sample_request).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token +def test_delete_message_bus_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = eventarc.DeleteMessageBusRequest( + name='name_value', + etag='etag_value', + ) -def test_create_channel_rest_use_cached_wrapped_rpc(): + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_message_bus), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ client.delete_message_bus(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == eventarc.DeleteMessageBusRequest( + name='name_value', + etag='etag_value', + ) + +def test_delete_message_bus_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport="grpc", ) # Should wrap all calls on client creation @@ -8759,370 +9517,341 @@ def test_create_channel_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.create_channel_ in client._transport._wrapped_methods + assert client._transport.delete_message_bus in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.create_channel_] = mock_rpc - + client._transport._wrapped_methods[client._transport.delete_message_bus] = mock_rpc request = {} - client.create_channel(request) + client.delete_message_bus(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - client.create_channel(request) + client.delete_message_bus(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 +@pytest.mark.asyncio +async def test_delete_message_bus_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = EventarcAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) -def test_create_channel_rest_required_fields(request_type=eventarc.CreateChannelRequest): - transport_class = transports.EventarcRestTransport + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() - request_init = {} - request_init["parent"] = "" - request_init["channel_id"] = "" - request_init["validate_only"] = False - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) + # Ensure method has been cached + assert client._client._transport.delete_message_bus in client._client._transport._wrapped_methods - # verify fields with default values are dropped - assert "channelId" not in jsonified_request - assert "validateOnly" not in jsonified_request + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.delete_message_bus] = mock_rpc - unset_fields = 
transport_class(credentials=ga_credentials.AnonymousCredentials()).create_channel_._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) + request = {} + await client.delete_message_bus(request) - # verify required fields with default values are now present - assert "channelId" in jsonified_request - assert jsonified_request["channelId"] == request_init["channel_id"] - assert "validateOnly" in jsonified_request - assert jsonified_request["validateOnly"] == request_init["validate_only"] + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 - jsonified_request["parent"] = 'parent_value' - jsonified_request["channelId"] = 'channel_id_value' - jsonified_request["validateOnly"] = True + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_channel_._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("channel_id", "validate_only", )) - jsonified_request.update(unset_fields) + await client.delete_message_bus(request) - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - assert "channelId" in jsonified_request - assert jsonified_request["channelId"] == 'channel_id_value' - assert "validateOnly" in jsonified_request - assert jsonified_request["validateOnly"] == True + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_delete_message_bus_async(transport: str = 'grpc_asyncio', request_type=eventarc.DeleteMessageBusRequest): + client = EventarcAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_message_bus), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.delete_message_bus(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = eventarc.DeleteMessageBusRequest() + assert args[0] == request + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_delete_message_bus_async_from_dict(): + await test_delete_message_bus_async(request_type=dict) + +def test_delete_message_bus_field_headers(): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), - transport='rest', ) - request = request_type(**request_init) - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = eventarc.DeleteMessageBusRequest() - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) + request.name = 'name_value' - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_message_bus), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.delete_message_bus(request) - response = client.create_channel(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request - expected_params = [ - ( - "channelId", - "", - ), - ( - "validateOnly", - str(False).lower(), - ), - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] -def test_create_channel_rest_unset_required_fields(): - transport = transports.EventarcRestTransport(credentials=ga_credentials.AnonymousCredentials) +@pytest.mark.asyncio +async def test_delete_message_bus_field_headers_async(): + client = EventarcAsyncClient( + credentials=async_anonymous_credentials(), + ) - unset_fields = transport.create_channel_._get_unset_required_fields({}) - assert set(unset_fields) == (set(("channelId", "validateOnly", )) & set(("parent", "channel", "channelId", "validateOnly", ))) + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = eventarc.DeleteMessageBusRequest() + request.name = 'name_value' -def test_create_channel_rest_flattened(): - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_message_bus), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.delete_message_bus(request) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/locations/sample2'} + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - channel=gce_channel.Channel(name='name_value'), - channel_id='channel_id_value', - ) - mock_args.update(sample_request) - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} +def test_delete_message_bus_flattened(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + ) - client.create_channel(**mock_args) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_message_bus), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_message_bus( + name='name_value', + etag='etag_value', + ) # Establish that the underlying call was made with the expected # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/channels" % client.transport._host, args[1]) + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + arg = args[0].etag + mock_val = 'etag_value' + assert arg == mock_val -def test_create_channel_rest_flattened_error(transport: str = 'rest'): +def test_delete_message_bus_flattened_error(): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.create_channel( - eventarc.CreateChannelRequest(), - parent='parent_value', - channel=gce_channel.Channel(name='name_value'), - channel_id='channel_id_value', + client.delete_message_bus( + eventarc.DeleteMessageBusRequest(), + name='name_value', + etag='etag_value', ) - -def test_update_channel_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.update_channel in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client._transport._wrapped_methods[client._transport.update_channel] = mock_rpc - - request = {} - client.update_channel(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.update_channel(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_update_channel_rest_required_fields(request_type=eventarc.UpdateChannelRequest): - transport_class = transports.EventarcRestTransport - - request_init = {} - request_init["validate_only"] = False - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - assert "validateOnly" not in jsonified_request - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_channel._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - assert "validateOnly" in jsonified_request - assert jsonified_request["validateOnly"] == request_init["validate_only"] - - jsonified_request["validateOnly"] = True - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_channel._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("update_mask", "validate_only", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "validateOnly" in jsonified_request - assert jsonified_request["validateOnly"] == True - - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', +@pytest.mark.asyncio +async def test_delete_message_bus_flattened_async(): + client = EventarcAsyncClient( + credentials=async_anonymous_credentials(), ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "patch", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.update_channel(request) + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_message_bus), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') - expected_params = [ - ( - "validateOnly", - str(False).lower(), - ), - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_message_bus( + name='name_value', + etag='etag_value', + ) + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + arg = args[0].etag + mock_val = 'etag_value' + assert arg == mock_val -def test_update_channel_rest_unset_required_fields(): - transport = transports.EventarcRestTransport(credentials=ga_credentials.AnonymousCredentials) +@pytest.mark.asyncio +async def test_delete_message_bus_flattened_error_async(): + client = EventarcAsyncClient( + credentials=async_anonymous_credentials(), + ) - unset_fields = transport.update_channel._get_unset_required_fields({}) - assert set(unset_fields) == (set(("updateMask", "validateOnly", )) & set(("validateOnly", ))) + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_message_bus( + eventarc.DeleteMessageBusRequest(), + name='name_value', + etag='etag_value', + ) -def test_update_channel_rest_flattened(): +@pytest.mark.parametrize("request_type", [ + eventarc.GetEnrollmentRequest, + dict, +]) +def test_get_enrollment(request_type, transport: str = 'grpc'): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport=transport, ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # get arguments that satisfy an http rule for this method - sample_request = {'channel': {'name': 'projects/sample1/locations/sample2/channels/sample3'}} + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() - # get truthy value for each flattened field - mock_args = dict( - channel=gce_channel.Channel(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_enrollment), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = enrollment.Enrollment( + name='name_value', + uid='uid_value', + etag='etag_value', + display_name='display_name_value', + cel_match='cel_match_value', + message_bus='message_bus_value', + destination='destination_value', ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.get_enrollment(request) - client.update_channel(**mock_args) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = eventarc.GetEnrollmentRequest() + assert args[0] == request - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{channel.name=projects/*/locations/*/channels/*}" % client.transport._host, args[1]) + # Establish that the response is the type that we expect. + assert isinstance(response, enrollment.Enrollment) + assert response.name == 'name_value' + assert response.uid == 'uid_value' + assert response.etag == 'etag_value' + assert response.display_name == 'display_name_value' + assert response.cel_match == 'cel_match_value' + assert response.message_bus == 'message_bus_value' + assert response.destination == 'destination_value' -def test_update_channel_rest_flattened_error(transport: str = 'rest'): +def test_get_enrollment_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport='grpc', ) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.update_channel( - eventarc.UpdateChannelRequest(), - channel=gce_channel.Channel(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = eventarc.GetEnrollmentRequest( + name='name_value', + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_enrollment), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ client.get_enrollment(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == eventarc.GetEnrollmentRequest( + name='name_value', + ) -def test_delete_channel_rest_use_cached_wrapped_rpc(): +def test_get_enrollment_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport="grpc", ) # Should wrap all calls on client creation @@ -9130,177 +9859,328 @@ def test_delete_channel_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.delete_channel in client._transport._wrapped_methods + assert client._transport.get_enrollment in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.delete_channel] = mock_rpc - + client._transport._wrapped_methods[client._transport.get_enrollment] = mock_rpc request = {} - client.delete_channel(request) + client.get_enrollment(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.delete_channel(request) + client.get_enrollment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 +@pytest.mark.asyncio +async def test_get_enrollment_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = EventarcAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) -def test_delete_channel_rest_required_fields(request_type=eventarc.DeleteChannelRequest): - transport_class = transports.EventarcRestTransport + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() - request_init = {} - request_init["name"] = "" - request_init["validate_only"] = False - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) + # Ensure method has been cached + assert client._client._transport.get_enrollment in client._client._transport._wrapped_methods - # verify fields with default values are dropped - assert "validateOnly" not in jsonified_request + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.get_enrollment] = mock_rpc - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_channel._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) + request = {} + await client.get_enrollment(request) - # verify required fields with default values are now present - assert "validateOnly" in jsonified_request - assert 
jsonified_request["validateOnly"] == request_init["validate_only"] + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 - jsonified_request["name"] = 'name_value' - jsonified_request["validateOnly"] = True + await client.get_enrollment(request) - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_channel._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("validate_only", )) - jsonified_request.update(unset_fields) + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - assert "validateOnly" in jsonified_request - assert jsonified_request["validateOnly"] == True +@pytest.mark.asyncio +async def test_get_enrollment_async(transport: str = 'grpc_asyncio', request_type=eventarc.GetEnrollmentRequest): + client = EventarcAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_enrollment), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(enrollment.Enrollment( + name='name_value', + uid='uid_value', + etag='etag_value', + display_name='display_name_value', + cel_match='cel_match_value', + message_bus='message_bus_value', + destination='destination_value', + )) + response = await client.get_enrollment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = eventarc.GetEnrollmentRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, enrollment.Enrollment) + assert response.name == 'name_value' + assert response.uid == 'uid_value' + assert response.etag == 'etag_value' + assert response.display_name == 'display_name_value' + assert response.cel_match == 'cel_match_value' + assert response.message_bus == 'message_bus_value' + assert response.destination == 'destination_value' + + +@pytest.mark.asyncio +async def test_get_enrollment_async_from_dict(): + await test_get_enrollment_async(request_type=dict) +def test_get_enrollment_field_headers(): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), - transport='rest', ) - request = request_type(**request_init) - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "delete", - 'query_params': pb_request, - } - transcode.return_value = transcode_result + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = eventarc.GetEnrollmentRequest() - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) + request.name = 'name_value' - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_enrollment), + '__call__') as call: + call.return_value = enrollment.Enrollment() + client.get_enrollment(request) - response = client.delete_channel(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request - expected_params = [ - ( - "validateOnly", - str(False).lower(), - ), - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] -def test_delete_channel_rest_unset_required_fields(): - transport = transports.EventarcRestTransport(credentials=ga_credentials.AnonymousCredentials) +@pytest.mark.asyncio +async def test_get_enrollment_field_headers_async(): + client = EventarcAsyncClient( + credentials=async_anonymous_credentials(), + ) - unset_fields = transport.delete_channel._get_unset_required_fields({}) - assert set(unset_fields) == (set(("validateOnly", )) & set(("name", "validateOnly", ))) + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = eventarc.GetEnrollmentRequest() + request.name = 'name_value' -def test_delete_channel_rest_flattened(): + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_enrollment), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(enrollment.Enrollment()) + await client.get_enrollment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_get_enrollment_flattened(): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_enrollment), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = enrollment.Enrollment() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ client.get_enrollment( + name='name_value', + ) - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/channels/sample3'} + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val - # get truthy value for each flattened field - mock_args = dict( + +def test_get_enrollment_flattened_error(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_enrollment( + eventarc.GetEnrollmentRequest(), name='name_value', ) - mock_args.update(sample_request) - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} +@pytest.mark.asyncio +async def test_get_enrollment_flattened_async(): + client = EventarcAsyncClient( + credentials=async_anonymous_credentials(), + ) - client.delete_channel(**mock_args) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_enrollment), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = enrollment.Enrollment() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(enrollment.Enrollment()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_enrollment( + name='name_value', + ) # Establish that the underlying call was made with the expected # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=projects/*/locations/*/channels/*}" % client.transport._host, args[1]) - + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val -def test_delete_channel_rest_flattened_error(transport: str = 'rest'): - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, +@pytest.mark.asyncio +async def test_get_enrollment_flattened_error_async(): + client = EventarcAsyncClient( + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.delete_channel( - eventarc.DeleteChannelRequest(), + await client.get_enrollment( + eventarc.GetEnrollmentRequest(), name='name_value', ) -def test_get_provider_rest_use_cached_wrapped_rpc(): +@pytest.mark.parametrize("request_type", [ + eventarc.ListEnrollmentsRequest, + dict, +]) +def test_list_enrollments(request_type, transport: str = 'grpc'): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_enrollments), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = eventarc.ListEnrollmentsResponse( + next_page_token='next_page_token_value', + unreachable=['unreachable_value'], + ) + response = client.list_enrollments(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = eventarc.ListEnrollmentsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListEnrollmentsPager) + assert response.next_page_token == 'next_page_token_value' + assert response.unreachable == ['unreachable_value'] + + +def test_list_enrollments_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = eventarc.ListEnrollmentsRequest( + parent='parent_value', + page_token='page_token_value', + order_by='order_by_value', + filter='filter_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_enrollments), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.list_enrollments(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == eventarc.ListEnrollmentsRequest( + parent='parent_value', + page_token='page_token_value', + order_by='order_by_value', + filter='filter_value', + ) + +def test_list_enrollments_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport="grpc", ) # Should wrap all calls on client creation @@ -9308,395 +10188,543 @@ def test_get_provider_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_provider in client._transport._wrapped_methods + assert client._transport.list_enrollments in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_provider] = mock_rpc - + client._transport._wrapped_methods[client._transport.list_enrollments] = mock_rpc request = {} - client.get_provider(request) + client.list_enrollments(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.get_provider(request) + client.list_enrollments(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 +@pytest.mark.asyncio +async def test_list_enrollments_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = EventarcAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) -def test_get_provider_rest_required_fields(request_type=eventarc.GetProviderRequest): - transport_class = transports.EventarcRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() - # verify fields with default values are dropped + # Ensure method has been cached + assert client._client._transport.list_enrollments in client._client._transport._wrapped_methods - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_provider._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.list_enrollments] = mock_rpc - # verify required fields with default values are now present + request = {} + await client.list_enrollments(request) - jsonified_request["name"] = 'name_value' + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_provider._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) + await client.list_enrollments(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_list_enrollments_async(transport: str = 'grpc_asyncio', request_type=eventarc.ListEnrollmentsRequest): + client = EventarcAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_enrollments), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(eventarc.ListEnrollmentsResponse( + next_page_token='next_page_token_value', + unreachable=['unreachable_value'], + )) + response = await client.list_enrollments(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = eventarc.ListEnrollmentsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListEnrollmentsAsyncPager) + assert response.next_page_token == 'next_page_token_value' + assert response.unreachable == ['unreachable_value'] - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' +@pytest.mark.asyncio +async def test_list_enrollments_async_from_dict(): + await test_list_enrollments_async(request_type=dict) + +def test_list_enrollments_field_headers(): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), - transport='rest', ) - request = request_type(**request_init) - # Designate an appropriate value for the returned response. - return_value = discovery.Provider() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = eventarc.ListEnrollmentsRequest() - response_value = Response() - response_value.status_code = 200 + request.parent = 'parent_value' - # Convert return value to protobuf type - return_value = discovery.Provider.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_enrollments), + '__call__') as call: + call.return_value = eventarc.ListEnrollmentsResponse() + client.list_enrollments(request) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request - response = client.get_provider(request) + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] - expected_params = [ - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params +@pytest.mark.asyncio +async def test_list_enrollments_field_headers_async(): + client = EventarcAsyncClient( + credentials=async_anonymous_credentials(), + ) -def test_get_provider_rest_unset_required_fields(): - transport = transports.EventarcRestTransport(credentials=ga_credentials.AnonymousCredentials) + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = eventarc.ListEnrollmentsRequest() - unset_fields = transport.get_provider._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) + request.parent = 'parent_value' + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_enrollments), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(eventarc.ListEnrollmentsResponse()) + await client.list_enrollments(request) -def test_get_provider_rest_flattened(): + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_list_enrollments_flattened(): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = discovery.Provider() - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/providers/sample3'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_enrollments), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = eventarc.ListEnrollmentsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_enrollments( + parent='parent_value', ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = discovery.Provider.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.get_provider(**mock_args) # Establish that the underlying call was made with the expected # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=projects/*/locations/*/providers/*}" % client.transport._host, args[1]) + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val -def test_get_provider_rest_flattened_error(transport: str = 'rest'): +def test_list_enrollments_flattened_error(): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.get_provider( - eventarc.GetProviderRequest(), - name='name_value', - ) - - -def test_list_providers_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + client.list_enrollments( + eventarc.ListEnrollmentsRequest(), + parent='parent_value', ) - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_providers in client._transport._wrapped_methods +@pytest.mark.asyncio +async def test_list_enrollments_flattened_async(): + client = EventarcAsyncClient( + credentials=async_anonymous_credentials(), + ) - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_providers] = mock_rpc + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_enrollments), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = eventarc.ListEnrollmentsResponse() - request = {} - client.list_providers(request) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(eventarc.ListEnrollmentsResponse()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_enrollments( + parent='parent_value', + ) - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val - client.list_providers(request) +@pytest.mark.asyncio +async def test_list_enrollments_flattened_error_async(): + client = EventarcAsyncClient( + credentials=async_anonymous_credentials(), + ) - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.list_enrollments( + eventarc.ListEnrollmentsRequest(), + parent='parent_value', + ) -def test_list_providers_rest_required_fields(request_type=eventarc.ListProvidersRequest): - transport_class = transports.EventarcRestTransport +def test_list_enrollments_pager(transport_name: str = "grpc"): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_providers._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_enrollments), + '__call__') as call: + # Set the response to a series of pages. + call.side_effect = ( + eventarc.ListEnrollmentsResponse( + enrollments=[ + enrollment.Enrollment(), + enrollment.Enrollment(), + enrollment.Enrollment(), + ], + next_page_token='abc', + ), + eventarc.ListEnrollmentsResponse( + enrollments=[], + next_page_token='def', + ), + eventarc.ListEnrollmentsResponse( + enrollments=[ + enrollment.Enrollment(), + ], + next_page_token='ghi', + ), + eventarc.ListEnrollmentsResponse( + enrollments=[ + enrollment.Enrollment(), + enrollment.Enrollment(), + ], + ), + RuntimeError, + ) - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_providers._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("filter", "order_by", "page_size", "page_token", )) - jsonified_request.update(unset_fields) + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ('parent', ''), + )), + ) + pager = client.list_enrollments(request={}, retry=retry, timeout=timeout) - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, enrollment.Enrollment) + for i in results) +def test_list_enrollments_pages(transport_name: str = "grpc"): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), - transport='rest', + transport=transport_name, ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = eventarc.ListProvidersResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. 
- with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = eventarc.ListProvidersResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_enrollments), + '__call__') as call: + # Set the response to a series of pages. + call.side_effect = ( + eventarc.ListEnrollmentsResponse( + enrollments=[ + enrollment.Enrollment(), + enrollment.Enrollment(), + enrollment.Enrollment(), + ], + next_page_token='abc', + ), + eventarc.ListEnrollmentsResponse( + enrollments=[], + next_page_token='def', + ), + eventarc.ListEnrollmentsResponse( + enrollments=[ + enrollment.Enrollment(), + ], + next_page_token='ghi', + ), + eventarc.ListEnrollmentsResponse( + enrollments=[ + enrollment.Enrollment(), + enrollment.Enrollment(), + ], + ), + RuntimeError, + ) + pages = list(client.list_enrollments(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token - response = client.list_providers(request) +@pytest.mark.asyncio +async def test_list_enrollments_async_pager(): + client = EventarcAsyncClient( + credentials=async_anonymous_credentials(), + ) - expected_params = [ - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_enrollments), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + eventarc.ListEnrollmentsResponse( + enrollments=[ + enrollment.Enrollment(), + enrollment.Enrollment(), + enrollment.Enrollment(), + ], + next_page_token='abc', + ), + eventarc.ListEnrollmentsResponse( + enrollments=[], + next_page_token='def', + ), + eventarc.ListEnrollmentsResponse( + enrollments=[ + enrollment.Enrollment(), + ], + next_page_token='ghi', + ), + eventarc.ListEnrollmentsResponse( + enrollments=[ + enrollment.Enrollment(), + enrollment.Enrollment(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_enrollments(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + assert len(responses) == 6 + assert all(isinstance(i, enrollment.Enrollment) + for i in responses) -def test_list_providers_rest_unset_required_fields(): - transport = transports.EventarcRestTransport(credentials=ga_credentials.AnonymousCredentials) - unset_fields = transport.list_providers._get_unset_required_fields({}) - assert set(unset_fields) == (set(("filter", "orderBy", "pageSize", "pageToken", )) & set(("parent", ))) +@pytest.mark.asyncio +async def test_list_enrollments_async_pages(): + client = EventarcAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_enrollments), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. + call.side_effect = ( + eventarc.ListEnrollmentsResponse( + enrollments=[ + enrollment.Enrollment(), + enrollment.Enrollment(), + enrollment.Enrollment(), + ], + next_page_token='abc', + ), + eventarc.ListEnrollmentsResponse( + enrollments=[], + next_page_token='def', + ), + eventarc.ListEnrollmentsResponse( + enrollments=[ + enrollment.Enrollment(), + ], + next_page_token='ghi', + ), + eventarc.ListEnrollmentsResponse( + enrollments=[ + enrollment.Enrollment(), + enrollment.Enrollment(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_enrollments(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token -def test_list_providers_rest_flattened(): +@pytest.mark.parametrize("request_type", [ + eventarc.CreateEnrollmentRequest, + dict, +]) +def test_create_enrollment(request_type, transport: str = 'grpc'): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport=transport, ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = eventarc.ListProvidersResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/locations/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - ) - mock_args.update(sample_request) + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = eventarc.ListProvidersResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_enrollment), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/spam') + response = client.create_enrollment(request) - client.list_providers(**mock_args) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = eventarc.CreateEnrollmentRequest() + assert args[0] == request - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/providers" % client.transport._host, args[1]) + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) -def test_list_providers_rest_flattened_error(transport: str = 'rest'): +def test_create_enrollment_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport='grpc', ) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_providers( - eventarc.ListProvidersRequest(), + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = eventarc.CreateEnrollmentRequest( + parent='parent_value', + enrollment_id='enrollment_id_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_enrollment), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.create_enrollment(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == eventarc.CreateEnrollmentRequest( parent='parent_value', + enrollment_id='enrollment_id_value', ) +def test_create_enrollment_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) -def test_list_providers_rest_pager(transport: str = 'rest'): - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(Session, 'request') as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. - #with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - eventarc.ListProvidersResponse( - providers=[ - discovery.Provider(), - discovery.Provider(), - discovery.Provider(), - ], - next_page_token='abc', - ), - eventarc.ListProvidersResponse( - providers=[], - next_page_token='def', - ), - eventarc.ListProvidersResponse( - providers=[ - discovery.Provider(), - ], - next_page_token='ghi', - ), - eventarc.ListProvidersResponse( - providers=[ - discovery.Provider(), - discovery.Provider(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple(eventarc.ListProvidersResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode('UTF-8') - return_val.status_code = 200 - req.side_effect = return_values + # Ensure method has been cached + assert client._transport.create_enrollment in client._transport._wrapped_methods - sample_request = {'parent': 'projects/sample1/locations/sample2'} + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.create_enrollment] = mock_rpc + request = {} + client.create_enrollment(request) - pager = client.list_providers(request=sample_request) + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, discovery.Provider) - for i in results) + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() - pages = list(client.list_providers(request=sample_request).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token + client.create_enrollment(request) + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 -def test_get_channel_connection_rest_use_cached_wrapped_rpc(): +@pytest.mark.asyncio +async def test_create_enrollment_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = EventarcAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, ) # Should wrap all calls on client creation @@ -9704,165 +10732,296 @@ def test_get_channel_connection_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_channel_connection in client._transport._wrapped_methods + assert client._client._transport.create_enrollment in client._client._transport._wrapped_methods # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_channel_connection] = mock_rpc + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.create_enrollment] = mock_rpc request = {} - client.get_channel_connection(request) + await client.create_enrollment(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.get_channel_connection(request) + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.create_enrollment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 +@pytest.mark.asyncio +async def test_create_enrollment_async(transport: str = 'grpc_asyncio', request_type=eventarc.CreateEnrollmentRequest): + client = EventarcAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) -def test_get_channel_connection_rest_required_fields(request_type=eventarc.GetChannelConnectionRequest): - transport_class = transports.EventarcRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_channel_connection._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_enrollment), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.create_enrollment(request) - # verify required fields with default values are now present + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = eventarc.CreateEnrollmentRequest() + assert args[0] == request - jsonified_request["name"] = 'name_value' + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_channel_connection._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' +@pytest.mark.asyncio +async def test_create_enrollment_async_from_dict(): + await test_create_enrollment_async(request_type=dict) +def test_create_enrollment_field_headers(): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), - transport='rest', ) - request = request_type(**request_init) - # Designate an appropriate value for the returned response. - return_value = channel_connection.ChannelConnection() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = eventarc.CreateEnrollmentRequest() - response_value = Response() - response_value.status_code = 200 + request.parent = 'parent_value' - # Convert return value to protobuf type - return_value = channel_connection.ChannelConnection.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_enrollment), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.create_enrollment(request) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request - response = client.get_channel_connection(request) + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] - expected_params = [ - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params +@pytest.mark.asyncio +async def test_create_enrollment_field_headers_async(): + client = EventarcAsyncClient( + credentials=async_anonymous_credentials(), + ) -def test_get_channel_connection_rest_unset_required_fields(): - transport = transports.EventarcRestTransport(credentials=ga_credentials.AnonymousCredentials) + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = eventarc.CreateEnrollmentRequest() - unset_fields = transport.get_channel_connection._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_enrollment), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.create_enrollment(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request -def test_get_channel_connection_rest_flattened(): + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_create_enrollment_flattened(): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = channel_connection.ChannelConnection() - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/channelConnections/sample3'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_enrollment), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ client.create_enrollment( + parent='parent_value', + enrollment=gce_enrollment.Enrollment(name='name_value'), + enrollment_id='enrollment_id_value', ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = channel_connection.ChannelConnection.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.get_channel_connection(**mock_args) # Establish that the underlying call was made with the expected # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=projects/*/locations/*/channelConnections/*}" % client.transport._host, args[1]) + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + arg = args[0].enrollment + mock_val = gce_enrollment.Enrollment(name='name_value') + assert arg == mock_val + arg = args[0].enrollment_id + mock_val = 'enrollment_id_value' + assert arg == mock_val -def test_get_channel_connection_rest_flattened_error(transport: str = 'rest'): +def test_create_enrollment_flattened_error(): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_channel_connection( - eventarc.GetChannelConnectionRequest(), - name='name_value', + client.create_enrollment( + eventarc.CreateEnrollmentRequest(), + parent='parent_value', + enrollment=gce_enrollment.Enrollment(name='name_value'), + enrollment_id='enrollment_id_value', ) - -def test_list_channel_connections_rest_use_cached_wrapped_rpc(): +@pytest.mark.asyncio +async def test_create_enrollment_flattened_async(): + client = EventarcAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_enrollment), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_enrollment( + parent='parent_value', + enrollment=gce_enrollment.Enrollment(name='name_value'), + enrollment_id='enrollment_id_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + arg = args[0].enrollment + mock_val = gce_enrollment.Enrollment(name='name_value') + assert arg == mock_val + arg = args[0].enrollment_id + mock_val = 'enrollment_id_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_create_enrollment_flattened_error_async(): + client = EventarcAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_enrollment( + eventarc.CreateEnrollmentRequest(), + parent='parent_value', + enrollment=gce_enrollment.Enrollment(name='name_value'), + enrollment_id='enrollment_id_value', + ) + + +@pytest.mark.parametrize("request_type", [ + eventarc.UpdateEnrollmentRequest, + dict, +]) +def test_update_enrollment(request_type, transport: str = 'grpc'): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_enrollment), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/spam') + response = client.update_enrollment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = eventarc.UpdateEnrollmentRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_update_enrollment_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = eventarc.UpdateEnrollmentRequest( + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_enrollment), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ client.update_enrollment(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == eventarc.UpdateEnrollmentRequest( + ) + +def test_update_enrollment_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport="grpc", ) # Should wrap all calls on client creation @@ -9870,229 +11029,328 @@ def test_list_channel_connections_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.list_channel_connections in client._transport._wrapped_methods + assert client._transport.update_enrollment in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_channel_connections] = mock_rpc - + client._transport._wrapped_methods[client._transport.update_enrollment] = mock_rpc request = {} - client.list_channel_connections(request) + client.update_enrollment(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.list_channel_connections(request) + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_enrollment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 +@pytest.mark.asyncio +async def test_update_enrollment_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = EventarcAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) -def test_list_channel_connections_rest_required_fields(request_type=eventarc.ListChannelConnectionsRequest): - transport_class = transports.EventarcRestTransport + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) + # Ensure method has been cached + assert client._client._transport.update_enrollment in client._client._transport._wrapped_methods - # verify fields with default values are dropped + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.update_enrollment] = mock_rpc - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_channel_connections._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) + request = {} + await client.update_enrollment(request) - # verify required fields with default values are now present + # Establish that the underlying gRPC stub method was 
called. + assert mock_rpc.call_count == 1 - jsonified_request["parent"] = 'parent_value' + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_channel_connections._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("page_size", "page_token", )) - jsonified_request.update(unset_fields) + await client.update_enrollment(request) - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', +@pytest.mark.asyncio +async def test_update_enrollment_async(transport: str = 'grpc_asyncio', request_type=eventarc.UpdateEnrollmentRequest): + client = EventarcAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = eventarc.ListChannelConnectionsResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = eventarc.ListChannelConnectionsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_channel_connections(request) + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() - expected_params = [ - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_enrollment), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.update_enrollment(request) + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = eventarc.UpdateEnrollmentRequest() + assert args[0] == request -def test_list_channel_connections_rest_unset_required_fields(): - transport = transports.EventarcRestTransport(credentials=ga_credentials.AnonymousCredentials) + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) - unset_fields = transport.list_channel_connections._get_unset_required_fields({}) - assert set(unset_fields) == (set(("pageSize", "pageToken", )) & set(("parent", ))) +@pytest.mark.asyncio +async def test_update_enrollment_async_from_dict(): + await test_update_enrollment_async(request_type=dict) -def test_list_channel_connections_rest_flattened(): +def test_update_enrollment_field_headers(): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = eventarc.ListChannelConnectionsResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/locations/sample2'} + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = eventarc.UpdateEnrollmentRequest() - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - ) - mock_args.update(sample_request) + request.enrollment.name = 'name_value' - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = eventarc.ListChannelConnectionsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_enrollment), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.update_enrollment(request) - client.list_channel_connections(**mock_args) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/channelConnections" % client.transport._host, args[1]) + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'enrollment.name=name_value', + ) in kw['metadata'] -def test_list_channel_connections_rest_flattened_error(transport: str = 'rest'): +@pytest.mark.asyncio +async def test_update_enrollment_field_headers_async(): + client = EventarcAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = eventarc.UpdateEnrollmentRequest() + + request.enrollment.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.update_enrollment), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.update_enrollment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'enrollment.name=name_value', + ) in kw['metadata'] + + +def test_update_enrollment_flattened(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_enrollment), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_enrollment( + enrollment=gce_enrollment.Enrollment(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].enrollment + mock_val = gce_enrollment.Enrollment(name='name_value') + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) + assert arg == mock_val + + +def test_update_enrollment_flattened_error(): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.list_channel_connections( - eventarc.ListChannelConnectionsRequest(), - parent='parent_value', + client.update_enrollment( + eventarc.UpdateEnrollmentRequest(), + enrollment=gce_enrollment.Enrollment(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), ) +@pytest.mark.asyncio +async def test_update_enrollment_flattened_async(): + client = EventarcAsyncClient( + credentials=async_anonymous_credentials(), + ) -def test_list_channel_connections_rest_pager(transport: str = 'rest'): + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_enrollment), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_enrollment( + enrollment=gce_enrollment.Enrollment(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].enrollment + mock_val = gce_enrollment.Enrollment(name='name_value') + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) + assert arg == mock_val + +@pytest.mark.asyncio +async def test_update_enrollment_flattened_error_async(): + client = EventarcAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_enrollment( + eventarc.UpdateEnrollmentRequest(), + enrollment=gce_enrollment.Enrollment(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + +@pytest.mark.parametrize("request_type", [ + eventarc.DeleteEnrollmentRequest, + dict, +]) +def test_delete_enrollment(request_type, transport: str = 'grpc'): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. - #with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - eventarc.ListChannelConnectionsResponse( - channel_connections=[ - channel_connection.ChannelConnection(), - channel_connection.ChannelConnection(), - channel_connection.ChannelConnection(), - ], - next_page_token='abc', - ), - eventarc.ListChannelConnectionsResponse( - channel_connections=[], - next_page_token='def', - ), - eventarc.ListChannelConnectionsResponse( - channel_connections=[ - channel_connection.ChannelConnection(), - ], - next_page_token='ghi', - ), - eventarc.ListChannelConnectionsResponse( - channel_connections=[ - channel_connection.ChannelConnection(), - channel_connection.ChannelConnection(), - ], - ), - ) - # Two responses for two calls - response = response + response + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() - # Wrap the values into proper Response objs - response = tuple(eventarc.ListChannelConnectionsResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode('UTF-8') - return_val.status_code = 200 - req.side_effect = return_values + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_enrollment), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/spam') + response = client.delete_enrollment(request) - sample_request = {'parent': 'projects/sample1/locations/sample2'} + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = eventarc.DeleteEnrollmentRequest() + assert args[0] == request - pager = client.list_channel_connections(request=sample_request) + # Establish that the response is the type that we expect. 
+ assert isinstance(response, future.Future) - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, channel_connection.ChannelConnection) - for i in results) - pages = list(client.list_channel_connections(request=sample_request).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token +def test_delete_enrollment_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = eventarc.DeleteEnrollmentRequest( + name='name_value', + etag='etag_value', + ) -def test_create_channel_connection_rest_use_cached_wrapped_rpc(): + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_enrollment), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.delete_enrollment(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == eventarc.DeleteEnrollmentRequest( + name='name_value', + etag='etag_value', + ) + +def test_delete_enrollment_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport="grpc", ) # Should wrap all calls on client creation @@ -10100,347 +11358,339 @@ def test_create_channel_connection_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.create_channel_connection in client._transport._wrapped_methods + assert client._transport.delete_enrollment in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.create_channel_connection] = mock_rpc - + client._transport._wrapped_methods[client._transport.delete_enrollment] = mock_rpc request = {} - client.create_channel_connection(request) + client.delete_enrollment(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
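+        # (building that operations client consumes wrapper_fn calls, which is
+        # why the mock is reset here before the second RPC below)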
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - client.create_channel_connection(request) + client.delete_enrollment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 +@pytest.mark.asyncio +async def test_delete_enrollment_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = EventarcAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) -def test_create_channel_connection_rest_required_fields(request_type=eventarc.CreateChannelConnectionRequest): - transport_class = transports.EventarcRestTransport - - request_init = {} - request_init["parent"] = "" - request_init["channel_connection_id"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() - # verify fields with default values are dropped - assert "channelConnectionId" not in jsonified_request + # Ensure method has been cached + assert client._client._transport.delete_enrollment in client._client._transport._wrapped_methods - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_channel_connection._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.delete_enrollment] = mock_rpc - # verify required fields with default values are now present - assert "channelConnectionId" in jsonified_request - assert jsonified_request["channelConnectionId"] == request_init["channel_connection_id"] + request = {} + await client.delete_enrollment(request) - jsonified_request["parent"] = 'parent_value' - jsonified_request["channelConnectionId"] = 'channel_connection_id_value' + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_channel_connection._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("channel_connection_id", )) - jsonified_request.update(unset_fields) + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - assert "channelConnectionId" in jsonified_request - assert jsonified_request["channelConnectionId"] == 'channel_connection_id_value' + await client.delete_enrollment(request) - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} +@pytest.mark.asyncio +async def test_delete_enrollment_async(transport: str = 'grpc_asyncio', request_type=eventarc.DeleteEnrollmentRequest): + client = EventarcAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) - response = client.create_channel_connection(request) + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() - expected_params = [ - ( - "channelConnectionId", - "", - ), - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_enrollment), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.delete_enrollment(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = eventarc.DeleteEnrollmentRequest() + assert args[0] == request -def test_create_channel_connection_rest_unset_required_fields(): - transport = transports.EventarcRestTransport(credentials=ga_credentials.AnonymousCredentials) + # Establish that the response is the type that we expect. 
+ assert isinstance(response, future.Future) - unset_fields = transport.create_channel_connection._get_unset_required_fields({}) - assert set(unset_fields) == (set(("channelConnectionId", )) & set(("parent", "channelConnection", "channelConnectionId", ))) +@pytest.mark.asyncio +async def test_delete_enrollment_async_from_dict(): + await test_delete_enrollment_async(request_type=dict) -def test_create_channel_connection_rest_flattened(): +def test_delete_enrollment_field_headers(): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = eventarc.DeleteEnrollmentRequest() - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/locations/sample2'} + request.name = 'name_value' - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - channel_connection=gce_channel_connection.ChannelConnection(name='name_value'), - channel_connection_id='channel_connection_id_value', - ) - mock_args.update(sample_request) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_enrollment), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.delete_enrollment(request) - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request - client.create_channel_connection(**mock_args) + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_delete_enrollment_field_headers_async(): + client = EventarcAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = eventarc.DeleteEnrollmentRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_enrollment), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.delete_enrollment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_delete_enrollment_flattened(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_enrollment), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_enrollment( + name='name_value', + etag='etag_value', + ) # Establish that the underlying call was made with the expected # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/channelConnections" % client.transport._host, args[1]) + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + arg = args[0].etag + mock_val = 'etag_value' + assert arg == mock_val -def test_create_channel_connection_rest_flattened_error(transport: str = 'rest'): +def test_delete_enrollment_flattened_error(): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.create_channel_connection( - eventarc.CreateChannelConnectionRequest(), - parent='parent_value', - channel_connection=gce_channel_connection.ChannelConnection(name='name_value'), - channel_connection_id='channel_connection_id_value', + client.delete_enrollment( + eventarc.DeleteEnrollmentRequest(), + name='name_value', + etag='etag_value', ) +@pytest.mark.asyncio +async def test_delete_enrollment_flattened_async(): + client = EventarcAsyncClient( + credentials=async_anonymous_credentials(), + ) -def test_delete_channel_connection_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_enrollment), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_enrollment( + name='name_value', + etag='etag_value', ) - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + arg = args[0].etag + mock_val = 'etag_value' + assert arg == mock_val - # Ensure method has been cached - assert client._transport.delete_channel_connection in client._transport._wrapped_methods +@pytest.mark.asyncio +async def test_delete_enrollment_flattened_error_async(): + client = EventarcAsyncClient( + credentials=async_anonymous_credentials(), + ) - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.delete_channel_connection] = mock_rpc + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_enrollment( + eventarc.DeleteEnrollmentRequest(), + name='name_value', + etag='etag_value', + ) - request = {} - client.delete_channel_connection(request) - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 +@pytest.mark.parametrize("request_type", [ + eventarc.GetPipelineRequest, + dict, +]) +def test_get_pipeline(request_type, transport: str = 'grpc'): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() - client.delete_channel_connection(request) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_pipeline), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = pipeline.Pipeline( + name='name_value', + uid='uid_value', + display_name='display_name_value', + crypto_key_name='crypto_key_name_value', + etag='etag_value', + satisfies_pzs=True, + ) + response = client.get_pipeline(request) - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = eventarc.GetPipelineRequest() + assert args[0] == request + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pipeline.Pipeline) + assert response.name == 'name_value' + assert response.uid == 'uid_value' + assert response.display_name == 'display_name_value' + assert response.crypto_key_name == 'crypto_key_name_value' + assert response.etag == 'etag_value' + assert response.satisfies_pzs is True -def test_delete_channel_connection_rest_required_fields(request_type=eventarc.DeleteChannelConnectionRequest): - transport_class = transports.EventarcRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_channel_connection._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_channel_connection._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "delete", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.delete_channel_connection(request) - - expected_params = [ - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_delete_channel_connection_rest_unset_required_fields(): - transport = transports.EventarcRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.delete_channel_connection._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - -def test_delete_channel_connection_rest_flattened(): +def test_get_pipeline_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
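+    # GetPipelineRequest carries no request_id-style UUID4 field, so only `name`
+    # is populated below and the assertion simply confirms it round-trips unchanged.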
client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport='grpc', ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/channelConnections/sample3'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.delete_channel_connection(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=projects/*/locations/*/channelConnections/*}" % client.transport._host, args[1]) - - -def test_delete_channel_connection_rest_flattened_error(transport: str = 'rest'): - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = eventarc.GetPipelineRequest( + name='name_value', ) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_channel_connection( - eventarc.DeleteChannelConnectionRequest(), + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_pipeline), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.get_pipeline(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == eventarc.GetPipelineRequest( name='name_value', ) - -def test_get_google_channel_config_rest_use_cached_wrapped_rpc(): +def test_get_pipeline_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport="grpc", ) # Should wrap all calls on client creation @@ -10448,165 +11698,326 @@ def test_get_google_channel_config_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_google_channel_config in client._transport._wrapped_methods + assert client._transport.get_pipeline in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client._transport._wrapped_methods[client._transport.get_google_channel_config] = mock_rpc - + client._transport._wrapped_methods[client._transport.get_pipeline] = mock_rpc request = {} - client.get_google_channel_config(request) + client.get_pipeline(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.get_google_channel_config(request) + client.get_pipeline(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 +@pytest.mark.asyncio +async def test_get_pipeline_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = EventarcAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) -def test_get_google_channel_config_rest_required_fields(request_type=eventarc.GetGoogleChannelConfigRequest): - transport_class = transports.EventarcRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() - # verify fields with default values are dropped + # Ensure method has been cached + assert client._client._transport.get_pipeline in client._client._transport._wrapped_methods - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_google_channel_config._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.get_pipeline] = mock_rpc - # verify required fields with default values are now present + request = {} + await client.get_pipeline(request) - jsonified_request["name"] = 'name_value' + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_google_channel_config._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) + await client.get_pipeline(request) - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', +@pytest.mark.asyncio +async def test_get_pipeline_async(transport: str = 'grpc_asyncio', request_type=eventarc.GetPipelineRequest): + client = EventarcAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = google_channel_config.GoogleChannelConfig() - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = google_channel_config.GoogleChannelConfig.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.get_google_channel_config(request) + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() - expected_params = [ - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_pipeline), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(pipeline.Pipeline( + name='name_value', + uid='uid_value', + display_name='display_name_value', + crypto_key_name='crypto_key_name_value', + etag='etag_value', + satisfies_pzs=True, + )) + response = await client.get_pipeline(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = eventarc.GetPipelineRequest() + assert args[0] == request -def test_get_google_channel_config_rest_unset_required_fields(): - transport = transports.EventarcRestTransport(credentials=ga_credentials.AnonymousCredentials) + # Establish that the response is the type that we expect. + assert isinstance(response, pipeline.Pipeline) + assert response.name == 'name_value' + assert response.uid == 'uid_value' + assert response.display_name == 'display_name_value' + assert response.crypto_key_name == 'crypto_key_name_value' + assert response.etag == 'etag_value' + assert response.satisfies_pzs is True - unset_fields = transport.get_google_channel_config._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) +@pytest.mark.asyncio +async def test_get_pipeline_async_from_dict(): + await test_get_pipeline_async(request_type=dict) -def test_get_google_channel_config_rest_flattened(): +def test_get_pipeline_field_headers(): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = google_channel_config.GoogleChannelConfig() - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/googleChannelConfig'} + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = eventarc.GetPipelineRequest() - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) + request.name = 'name_value' - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = google_channel_config.GoogleChannelConfig.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_pipeline), + '__call__') as call: + call.return_value = pipeline.Pipeline() + client.get_pipeline(request) - client.get_google_channel_config(**mock_args) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_get_pipeline_field_headers_async(): + client = EventarcAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = eventarc.GetPipelineRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_pipeline), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(pipeline.Pipeline()) + await client.get_pipeline(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_get_pipeline_flattened(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_pipeline), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = pipeline.Pipeline() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_pipeline( + name='name_value', + ) # Establish that the underlying call was made with the expected # request object values. 
- assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=projects/*/locations/*/googleChannelConfig}" % client.transport._host, args[1]) + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val -def test_get_google_channel_config_rest_flattened_error(transport: str = 'rest'): +def test_get_pipeline_flattened_error(): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_google_channel_config( - eventarc.GetGoogleChannelConfigRequest(), + client.get_pipeline( + eventarc.GetPipelineRequest(), name='name_value', ) +@pytest.mark.asyncio +async def test_get_pipeline_flattened_async(): + client = EventarcAsyncClient( + credentials=async_anonymous_credentials(), + ) -def test_update_google_channel_config_rest_use_cached_wrapped_rpc(): + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_pipeline), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = pipeline.Pipeline() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(pipeline.Pipeline()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_pipeline( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_get_pipeline_flattened_error_async(): + client = EventarcAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_pipeline( + eventarc.GetPipelineRequest(), + name='name_value', + ) + + +@pytest.mark.parametrize("request_type", [ + eventarc.ListPipelinesRequest, + dict, +]) +def test_list_pipelines(request_type, transport: str = 'grpc'): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_pipelines), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = eventarc.ListPipelinesResponse( + next_page_token='next_page_token_value', + unreachable=['unreachable_value'], + ) + response = client.list_pipelines(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = eventarc.ListPipelinesRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListPipelinesPager) + assert response.next_page_token == 'next_page_token_value' + assert response.unreachable == ['unreachable_value'] + + +def test_list_pipelines_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = eventarc.ListPipelinesRequest( + parent='parent_value', + page_token='page_token_value', + order_by='order_by_value', + filter='filter_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_pipelines), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.list_pipelines(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == eventarc.ListPipelinesRequest( + parent='parent_value', + page_token='page_token_value', + order_by='order_by_value', + filter='filter_value', + ) + +def test_list_pipelines_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport="grpc", ) # Should wrap all calls on client creation @@ -10614,1208 +12025,15490 @@ def test_update_google_channel_config_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.update_google_channel_config in client._transport._wrapped_methods + assert client._transport.list_pipelines in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.update_google_channel_config] = mock_rpc - + client._transport._wrapped_methods[client._transport.list_pipelines] = mock_rpc request = {} - client.update_google_channel_config(request) + client.list_pipelines(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.update_google_channel_config(request) + client.list_pipelines(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 +@pytest.mark.asyncio +async def test_list_pipelines_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = EventarcAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) -def test_update_google_channel_config_rest_required_fields(request_type=eventarc.UpdateGoogleChannelConfigRequest): - transport_class = transports.EventarcRestTransport + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() - request_init = {} - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) + # Ensure method has been cached + assert client._client._transport.list_pipelines in client._client._transport._wrapped_methods - # verify fields with default values are dropped + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.list_pipelines] = mock_rpc - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_google_channel_config._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) + request = {} + await client.list_pipelines(request) - # verify required fields with default values are now present + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_google_channel_config._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("update_mask", )) - jsonified_request.update(unset_fields) + await client.list_pipelines(request) - # verify required fields with non-default values are left alone + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', +@pytest.mark.asyncio +async def test_list_pipelines_async(transport: str = 'grpc_asyncio', request_type=eventarc.ListPipelinesRequest): + client = EventarcAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, ) - request = request_type(**request_init) - # Designate an appropriate value for the returned response. - return_value = gce_google_channel_config.GoogleChannelConfig() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "patch", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() - response_value = Response() - response_value.status_code = 200 + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_pipelines), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(eventarc.ListPipelinesResponse( + next_page_token='next_page_token_value', + unreachable=['unreachable_value'], + )) + response = await client.list_pipelines(request) - # Convert return value to protobuf type - return_value = gce_google_channel_config.GoogleChannelConfig.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = eventarc.ListPipelinesRequest() + assert args[0] == request - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListPipelinesAsyncPager) + assert response.next_page_token == 'next_page_token_value' + assert response.unreachable == ['unreachable_value'] - response = client.update_google_channel_config(request) - expected_params = [ - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params +@pytest.mark.asyncio +async def test_list_pipelines_async_from_dict(): + await test_list_pipelines_async(request_type=dict) +def test_list_pipelines_field_headers(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + ) -def test_update_google_channel_config_rest_unset_required_fields(): - transport = transports.EventarcRestTransport(credentials=ga_credentials.AnonymousCredentials) + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = eventarc.ListPipelinesRequest() - unset_fields = transport.update_google_channel_config._get_unset_required_fields({}) - assert set(unset_fields) == (set(("updateMask", )) & set(("googleChannelConfig", ))) + request.parent = 'parent_value' + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_pipelines), + '__call__') as call: + call.return_value = eventarc.ListPipelinesResponse() + client.list_pipelines(request) -def test_update_google_channel_config_rest_flattened(): - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_list_pipelines_field_headers_async(): + client = EventarcAsyncClient( + credentials=async_anonymous_credentials(), ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = gce_google_channel_config.GoogleChannelConfig() + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = eventarc.ListPipelinesRequest() - # get arguments that satisfy an http rule for this method - sample_request = {'google_channel_config': {'name': 'projects/sample1/locations/sample2/googleChannelConfig'}} + request.parent = 'parent_value' - # get truthy value for each flattened field - mock_args = dict( - google_channel_config=gce_google_channel_config.GoogleChannelConfig(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - mock_args.update(sample_request) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_pipelines), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(eventarc.ListPipelinesResponse()) + await client.list_pipelines(request) - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = gce_google_channel_config.GoogleChannelConfig.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request - client.update_google_channel_config(**mock_args) + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_list_pipelines_flattened(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_pipelines), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = eventarc.ListPipelinesResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_pipelines( + parent='parent_value', + ) # Establish that the underlying call was made with the expected # request object values. 
- assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{google_channel_config.name=projects/*/locations/*/googleChannelConfig}" % client.transport._host, args[1]) + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val -def test_update_google_channel_config_rest_flattened_error(transport: str = 'rest'): +def test_list_pipelines_flattened_error(): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.update_google_channel_config( - eventarc.UpdateGoogleChannelConfigRequest(), - google_channel_config=gce_google_channel_config.GoogleChannelConfig(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + client.list_pipelines( + eventarc.ListPipelinesRequest(), + parent='parent_value', ) - -def test_credentials_transport_error(): - # It is an error to provide credentials and a transport instance. - transport = transports.EventarcGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), +@pytest.mark.asyncio +async def test_list_pipelines_flattened_async(): + client = EventarcAsyncClient( + credentials=async_anonymous_credentials(), ) - with pytest.raises(ValueError): - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_pipelines), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = eventarc.ListPipelinesResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(eventarc.ListPipelinesResponse()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_pipelines( + parent='parent_value', ) - # It is an error to provide a credentials file and a transport instance. - transport = transports.EventarcGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_list_pipelines_flattened_error_async(): + client = EventarcAsyncClient( + credentials=async_anonymous_credentials(), ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. with pytest.raises(ValueError): - client = EventarcClient( - client_options={"credentials_file": "credentials.json"}, - transport=transport, + await client.list_pipelines( + eventarc.ListPipelinesRequest(), + parent='parent_value', ) - # It is an error to provide an api_key and a transport instance. - transport = transports.EventarcGrpcTransport( + +def test_list_pipelines_pager(transport_name: str = "grpc"): + client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, ) - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = EventarcClient( - client_options=options, - transport=transport, + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_pipelines), + '__call__') as call: + # Set the response to a series of pages. + call.side_effect = ( + eventarc.ListPipelinesResponse( + pipelines=[ + pipeline.Pipeline(), + pipeline.Pipeline(), + pipeline.Pipeline(), + ], + next_page_token='abc', + ), + eventarc.ListPipelinesResponse( + pipelines=[], + next_page_token='def', + ), + eventarc.ListPipelinesResponse( + pipelines=[ + pipeline.Pipeline(), + ], + next_page_token='ghi', + ), + eventarc.ListPipelinesResponse( + pipelines=[ + pipeline.Pipeline(), + pipeline.Pipeline(), + ], + ), + RuntimeError, ) - # It is an error to provide an api_key and a credential. - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = EventarcClient( - client_options=options, - credentials=ga_credentials.AnonymousCredentials() + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ('parent', ''), + )), ) + pager = client.list_pipelines(request={}, retry=retry, timeout=timeout) - # It is an error to provide scopes and a transport instance. - transport = transports.EventarcGrpcTransport( + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, pipeline.Pipeline) + for i in results) +def test_list_pipelines_pages(transport_name: str = "grpc"): + client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, ) - with pytest.raises(ValueError): - client = EventarcClient( - client_options={"scopes": ["1", "2"]}, - transport=transport, - ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_pipelines), + '__call__') as call: + # Set the response to a series of pages. + call.side_effect = ( + eventarc.ListPipelinesResponse( + pipelines=[ + pipeline.Pipeline(), + pipeline.Pipeline(), + pipeline.Pipeline(), + ], + next_page_token='abc', + ), + eventarc.ListPipelinesResponse( + pipelines=[], + next_page_token='def', + ), + eventarc.ListPipelinesResponse( + pipelines=[ + pipeline.Pipeline(), + ], + next_page_token='ghi', + ), + eventarc.ListPipelinesResponse( + pipelines=[ + pipeline.Pipeline(), + pipeline.Pipeline(), + ], + ), + RuntimeError, + ) + pages = list(client.list_pipelines(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token -def test_transport_instance(): - # A client may be instantiated with a custom transport instance. - transport = transports.EventarcGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), +@pytest.mark.asyncio +async def test_list_pipelines_async_pager(): + client = EventarcAsyncClient( + credentials=async_anonymous_credentials(), ) - client = EventarcClient(transport=transport) - assert client.transport is transport -def test_transport_get_channel(): - # A client may be instantiated with a custom transport instance. - transport = transports.EventarcGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_pipelines), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. + call.side_effect = ( + eventarc.ListPipelinesResponse( + pipelines=[ + pipeline.Pipeline(), + pipeline.Pipeline(), + pipeline.Pipeline(), + ], + next_page_token='abc', + ), + eventarc.ListPipelinesResponse( + pipelines=[], + next_page_token='def', + ), + eventarc.ListPipelinesResponse( + pipelines=[ + pipeline.Pipeline(), + ], + next_page_token='ghi', + ), + eventarc.ListPipelinesResponse( + pipelines=[ + pipeline.Pipeline(), + pipeline.Pipeline(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_pipelines(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) - transport = transports.EventarcGrpcAsyncIOTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel + assert len(responses) == 6 + assert all(isinstance(i, pipeline.Pipeline) + for i in responses) -@pytest.mark.parametrize("transport_class", [ - transports.EventarcGrpcTransport, - transports.EventarcGrpcAsyncIOTransport, - transports.EventarcRestTransport, -]) -def test_transport_adc(transport_class): - # Test default credentials are used if not provided. - with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class() - adc.assert_called_once() -def test_transport_kind_grpc(): - transport = EventarcClient.get_transport_class("grpc")( - credentials=ga_credentials.AnonymousCredentials() +@pytest.mark.asyncio +async def test_list_pipelines_async_pages(): + client = EventarcAsyncClient( + credentials=async_anonymous_credentials(), ) - assert transport.kind == "grpc" + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_pipelines), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. + call.side_effect = ( + eventarc.ListPipelinesResponse( + pipelines=[ + pipeline.Pipeline(), + pipeline.Pipeline(), + pipeline.Pipeline(), + ], + next_page_token='abc', + ), + eventarc.ListPipelinesResponse( + pipelines=[], + next_page_token='def', + ), + eventarc.ListPipelinesResponse( + pipelines=[ + pipeline.Pipeline(), + ], + next_page_token='ghi', + ), + eventarc.ListPipelinesResponse( + pipelines=[ + pipeline.Pipeline(), + pipeline.Pipeline(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_pipelines(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token -def test_initialize_client_w_grpc(): +@pytest.mark.parametrize("request_type", [ + eventarc.CreatePipelineRequest, + dict, +]) +def test_create_pipeline(request_type, transport: str = 'grpc'): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc" + transport=transport, ) - assert client is not None + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_trigger_empty_call_grpc(): - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. + # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_trigger), + type(client.transport.create_pipeline), '__call__') as call: - call.return_value = trigger.Trigger() - client.get_trigger(request=None) + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/spam') + response = client.create_pipeline(request) - # Establish that the underlying stub method was called. - call.assert_called() + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request_msg = eventarc.GetTriggerRequest() + request = eventarc.CreatePipelineRequest() + assert args[0] == request - assert args[0] == request_msg + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_triggers_empty_call_grpc(): +def test_create_pipeline_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport='grpc', ) - # Mock the actual call, and fake the request. + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = eventarc.CreatePipelineRequest( + parent='parent_value', + pipeline_id='pipeline_id_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_triggers), + type(client.transport.create_pipeline), '__call__') as call: - call.return_value = eventarc.ListTriggersResponse() - client.list_triggers(request=None) - - # Establish that the underlying stub method was called. + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.create_pipeline(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - request_msg = eventarc.ListTriggersRequest() + assert args[0] == eventarc.CreatePipelineRequest( + parent='parent_value', + pipeline_id='pipeline_id_value', + ) - assert args[0] == request_msg +def test_create_pipeline_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
-def test_create_trigger_empty_call_grpc(): - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) + # Ensure method has been cached + assert client._transport.create_pipeline in client._transport._wrapped_methods - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_trigger), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.create_trigger(request=None) + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.create_pipeline] = mock_rpc + request = {} + client.create_pipeline(request) - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = eventarc.CreateTriggerRequest() + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 - assert args[0] == request_msg + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + client.create_pipeline(request) -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_update_trigger_empty_call_grpc(): - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_trigger), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.update_trigger(request=None) +@pytest.mark.asyncio +async def test_create_pipeline_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = EventarcAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = eventarc.UpdateTriggerRequest() + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() - assert args[0] == request_msg + # Ensure method has been cached + assert client._client._transport.create_pipeline in client._client._transport._wrapped_methods + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.create_pipeline] = mock_rpc -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_delete_trigger_empty_call_grpc(): - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) + request = {} + await client.create_pipeline(request) - # Mock the actual call, and fake the request. 
- with mock.patch.object( - type(client.transport.delete_trigger), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.delete_trigger(request=None) + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = eventarc.DeleteTriggerRequest() + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() - assert args[0] == request_msg + await client.create_pipeline(request) + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_channel_empty_call_grpc(): - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", +@pytest.mark.asyncio +async def test_create_pipeline_async(transport: str = 'grpc_asyncio', request_type=eventarc.CreatePipelineRequest): + client = EventarcAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, ) - # Mock the actual call, and fake the request. + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_channel), + type(client.transport.create_pipeline), '__call__') as call: - call.return_value = channel.Channel() - client.get_channel(request=None) + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.create_pipeline(request) - # Establish that the underlying stub method was called. - call.assert_called() + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request_msg = eventarc.GetChannelRequest() + request = eventarc.CreatePipelineRequest() + assert args[0] == request - assert args[0] == request_msg + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_channels_empty_call_grpc(): +@pytest.mark.asyncio +async def test_create_pipeline_async_from_dict(): + await test_create_pipeline_async(request_type=dict) + +def test_create_pipeline_field_headers(): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", ) - # Mock the actual call, and fake the request. + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = eventarc.CreatePipelineRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.list_channels), + type(client.transport.create_pipeline), '__call__') as call: - call.return_value = eventarc.ListChannelsResponse() - client.list_channels(request=None) + call.return_value = operations_pb2.Operation(name='operations/op') + client.create_pipeline(request) - # Establish that the underlying stub method was called. - call.assert_called() + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request_msg = eventarc.ListChannelsRequest() + assert args[0] == request - assert args[0] == request_msg + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_create_channel_empty_call_grpc(): - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", +@pytest.mark.asyncio +async def test_create_pipeline_field_headers_async(): + client = EventarcAsyncClient( + credentials=async_anonymous_credentials(), ) - # Mock the actual call, and fake the request. + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = eventarc.CreatePipelineRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_channel_), + type(client.transport.create_pipeline), '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.create_channel(request=None) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.create_pipeline(request) - # Establish that the underlying stub method was called. - call.assert_called() + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request_msg = eventarc.CreateChannelRequest() + assert args[0] == request - assert args[0] == request_msg + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_update_channel_empty_call_grpc(): +def test_create_pipeline_flattened(): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", ) - # Mock the actual call, and fake the request. + # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_channel), + type(client.transport.create_pipeline), '__call__') as call: + # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name='operations/op') - client.update_channel(request=None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_pipeline( + parent='parent_value', + pipeline=gce_pipeline.Pipeline(name='name_value'), + pipeline_id='pipeline_id_value', + ) - # Establish that the underlying stub method was called. 
- call.assert_called() + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request_msg = eventarc.UpdateChannelRequest() - - assert args[0] == request_msg + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + arg = args[0].pipeline + mock_val = gce_pipeline.Pipeline(name='name_value') + assert arg == mock_val + arg = args[0].pipeline_id + mock_val = 'pipeline_id_value' + assert arg == mock_val -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_delete_channel_empty_call_grpc(): +def test_create_pipeline_flattened_error(): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", ) - # Mock the actual call, and fake the request. + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_pipeline( + eventarc.CreatePipelineRequest(), + parent='parent_value', + pipeline=gce_pipeline.Pipeline(name='name_value'), + pipeline_id='pipeline_id_value', + ) + +@pytest.mark.asyncio +async def test_create_pipeline_flattened_async(): + client = EventarcAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_channel), + type(client.transport.create_pipeline), '__call__') as call: + # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name='operations/op') - client.delete_channel(request=None) - # Establish that the underlying stub method was called. - call.assert_called() + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_pipeline( + parent='parent_value', + pipeline=gce_pipeline.Pipeline(name='name_value'), + pipeline_id='pipeline_id_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request_msg = eventarc.DeleteChannelRequest() + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + arg = args[0].pipeline + mock_val = gce_pipeline.Pipeline(name='name_value') + assert arg == mock_val + arg = args[0].pipeline_id + mock_val = 'pipeline_id_value' + assert arg == mock_val - assert args[0] == request_msg +@pytest.mark.asyncio +async def test_create_pipeline_flattened_error_async(): + client = EventarcAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_pipeline( + eventarc.CreatePipelineRequest(), + parent='parent_value', + pipeline=gce_pipeline.Pipeline(name='name_value'), + pipeline_id='pipeline_id_value', + ) -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
-def test_get_provider_empty_call_grpc(): +@pytest.mark.parametrize("request_type", [ + eventarc.UpdatePipelineRequest, + dict, +]) +def test_update_pipeline(request_type, transport: str = 'grpc'): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport=transport, ) - # Mock the actual call, and fake the request. + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_provider), + type(client.transport.update_pipeline), '__call__') as call: - call.return_value = discovery.Provider() - client.get_provider(request=None) + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/spam') + response = client.update_pipeline(request) - # Establish that the underlying stub method was called. - call.assert_called() + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request_msg = eventarc.GetProviderRequest() + request = eventarc.UpdatePipelineRequest() + assert args[0] == request - assert args[0] == request_msg + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_providers_empty_call_grpc(): +def test_update_pipeline_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport='grpc', ) - # Mock the actual call, and fake the request. + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = eventarc.UpdatePipelineRequest( + ) + + # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_providers), + type(client.transport.update_pipeline), '__call__') as call: - call.return_value = eventarc.ListProvidersResponse() - client.list_providers(request=None) - - # Establish that the underlying stub method was called. + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.update_pipeline(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - request_msg = eventarc.ListProvidersRequest() + assert args[0] == eventarc.UpdatePipelineRequest( + ) - assert args[0] == request_msg +def test_update_pipeline_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
-def test_get_channel_connection_empty_call_grpc(): - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) + # Ensure method has been cached + assert client._transport.update_pipeline in client._transport._wrapped_methods - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_channel_connection), - '__call__') as call: - call.return_value = channel_connection.ChannelConnection() - client.get_channel_connection(request=None) + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.update_pipeline] = mock_rpc + request = {} + client.update_pipeline(request) - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = eventarc.GetChannelConnectionRequest() + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 - assert args[0] == request_msg + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + client.update_pipeline(request) -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_channel_connections_empty_call_grpc(): - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_channel_connections), - '__call__') as call: - call.return_value = eventarc.ListChannelConnectionsResponse() - client.list_channel_connections(request=None) +@pytest.mark.asyncio +async def test_update_pipeline_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = EventarcAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = eventarc.ListChannelConnectionsRequest() + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() - assert args[0] == request_msg + # Ensure method has been cached + assert client._client._transport.update_pipeline in client._client._transport._wrapped_methods + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.update_pipeline] = mock_rpc -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_create_channel_connection_empty_call_grpc(): - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) + request = {} + await client.update_pipeline(request) - # Mock the actual call, and fake the request. 
- with mock.patch.object( - type(client.transport.create_channel_connection), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.create_channel_connection(request=None) + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = eventarc.CreateChannelConnectionRequest() + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() - assert args[0] == request_msg + await client.update_pipeline(request) + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_delete_channel_connection_empty_call_grpc(): - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", +@pytest.mark.asyncio +async def test_update_pipeline_async(transport: str = 'grpc_asyncio', request_type=eventarc.UpdatePipelineRequest): + client = EventarcAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, ) - # Mock the actual call, and fake the request. + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_channel_connection), + type(client.transport.update_pipeline), '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.delete_channel_connection(request=None) + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.update_pipeline(request) - # Establish that the underlying stub method was called. - call.assert_called() + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request_msg = eventarc.DeleteChannelConnectionRequest() + request = eventarc.UpdatePipelineRequest() + assert args[0] == request - assert args[0] == request_msg + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_google_channel_config_empty_call_grpc(): +@pytest.mark.asyncio +async def test_update_pipeline_async_from_dict(): + await test_update_pipeline_async(request_type=dict) + +def test_update_pipeline_field_headers(): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", ) - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_google_channel_config), - '__call__') as call: - call.return_value = google_channel_config.GoogleChannelConfig() - client.get_google_channel_config(request=None) - - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = eventarc.GetGoogleChannelConfigRequest() - - assert args[0] == request_msg - + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = eventarc.UpdatePipelineRequest() -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_update_google_channel_config_empty_call_grpc(): - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) + request.pipeline.name = 'name_value' - # Mock the actual call, and fake the request. + # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_google_channel_config), + type(client.transport.update_pipeline), '__call__') as call: - call.return_value = gce_google_channel_config.GoogleChannelConfig() - client.update_google_channel_config(request=None) + call.return_value = operations_pb2.Operation(name='operations/op') + client.update_pipeline(request) - # Establish that the underlying stub method was called. - call.assert_called() + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request_msg = eventarc.UpdateGoogleChannelConfigRequest() - - assert args[0] == request_msg - + assert args[0] == request -def test_transport_kind_grpc_asyncio(): - transport = EventarcAsyncClient.get_transport_class("grpc_asyncio")( - credentials=async_anonymous_credentials() - ) - assert transport.kind == "grpc_asyncio" + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'pipeline.name=name_value', + ) in kw['metadata'] -def test_initialize_client_w_grpc_asyncio(): +@pytest.mark.asyncio +async def test_update_pipeline_field_headers_async(): client = EventarcAsyncClient( credentials=async_anonymous_credentials(), - transport="grpc_asyncio" ) - assert client is not None + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = eventarc.UpdatePipelineRequest() -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_get_trigger_empty_call_grpc_asyncio(): - client = EventarcAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) + request.pipeline.name = 'name_value' - # Mock the actual call, and fake the request. + # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_trigger), + type(client.transport.update_pipeline), '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(trigger.Trigger( - name='name_value', - uid='uid_value', - service_account='service_account_value', - channel='channel_value', - etag='etag_value', - )) - await client.get_trigger(request=None) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.update_pipeline(request) - # Establish that the underlying stub method was called. - call.assert_called() + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request_msg = eventarc.GetTriggerRequest() + assert args[0] == request - assert args[0] == request_msg + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'pipeline.name=name_value', + ) in kw['metadata'] -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_list_triggers_empty_call_grpc_asyncio(): - client = EventarcAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", +def test_update_pipeline_flattened(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), ) - # Mock the actual call, and fake the request. + # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_triggers), + type(client.transport.update_pipeline), '__call__') as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(eventarc.ListTriggersResponse( - next_page_token='next_page_token_value', - unreachable=['unreachable_value'], - )) - await client.list_triggers(request=None) + call.return_value = operations_pb2.Operation(name='operations/op') + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_pipeline( + pipeline=gce_pipeline.Pipeline(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) - # Establish that the underlying stub method was called. - call.assert_called() + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request_msg = eventarc.ListTriggersRequest() + arg = args[0].pipeline + mock_val = gce_pipeline.Pipeline(name='name_value') + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) + assert arg == mock_val - assert args[0] == request_msg +def test_update_pipeline_flattened_error(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_pipeline( + eventarc.UpdatePipelineRequest(), + pipeline=gce_pipeline.Pipeline(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. @pytest.mark.asyncio -async def test_create_trigger_empty_call_grpc_asyncio(): +async def test_update_pipeline_flattened_async(): client = EventarcAsyncClient( credentials=async_anonymous_credentials(), - transport="grpc_asyncio", ) - # Mock the actual call, and fake the request. + # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_trigger), + type(client.transport.update_pipeline), '__call__') as call: # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation(name='operations/op') + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name='operations/spam') ) - await client.create_trigger(request=None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_pipeline( + pipeline=gce_pipeline.Pipeline(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) - # Establish that the underlying stub method was called. - call.assert_called() + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request_msg = eventarc.CreateTriggerRequest() - - assert args[0] == request_msg - + arg = args[0].pipeline + mock_val = gce_pipeline.Pipeline(name='name_value') + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) + assert arg == mock_val -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. @pytest.mark.asyncio -async def test_update_trigger_empty_call_grpc_asyncio(): +async def test_update_pipeline_flattened_error_async(): client = EventarcAsyncClient( credentials=async_anonymous_credentials(), - transport="grpc_asyncio", ) - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_trigger), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_pipeline( + eventarc.UpdatePipelineRequest(), + pipeline=gce_pipeline.Pipeline(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), ) - await client.update_trigger(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = eventarc.UpdateTriggerRequest() - - assert args[0] == request_msg -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_delete_trigger_empty_call_grpc_asyncio(): - client = EventarcAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", +@pytest.mark.parametrize("request_type", [ + eventarc.DeletePipelineRequest, + dict, +]) +def test_delete_pipeline(request_type, transport: str = 'grpc'): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Mock the actual call, and fake the request. + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_trigger), + type(client.transport.delete_pipeline), '__call__') as call: # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.delete_trigger(request=None) + call.return_value = operations_pb2.Operation(name='operations/spam') + response = client.delete_pipeline(request) - # Establish that the underlying stub method was called. - call.assert_called() + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request_msg = eventarc.DeleteTriggerRequest() + request = eventarc.DeletePipelineRequest() + assert args[0] == request - assert args[0] == request_msg + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_get_channel_empty_call_grpc_asyncio(): - client = EventarcAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", +def test_delete_pipeline_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', ) - # Mock the actual call, and fake the request. + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = eventarc.DeletePipelineRequest( + name='name_value', + etag='etag_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_channel), + type(client.transport.delete_pipeline), '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(channel.Channel( - name='name_value', - uid='uid_value', - provider='provider_value', - state=channel.Channel.State.PENDING, - activation_token='activation_token_value', - crypto_key_name='crypto_key_name_value', - )) - await client.get_channel(request=None) - - # Establish that the underlying stub method was called. + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.delete_pipeline(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - request_msg = eventarc.GetChannelRequest() + assert args[0] == eventarc.DeletePipelineRequest( + name='name_value', + etag='etag_value', + ) - assert args[0] == request_msg +def test_delete_pipeline_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
-@pytest.mark.asyncio -async def test_list_channels_empty_call_grpc_asyncio(): - client = EventarcAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) + # Ensure method has been cached + assert client._transport.delete_pipeline in client._transport._wrapped_methods - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_channels), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(eventarc.ListChannelsResponse( - next_page_token='next_page_token_value', - unreachable=['unreachable_value'], - )) - await client.list_channels(request=None) + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.delete_pipeline] = mock_rpc + request = {} + client.delete_pipeline(request) - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = eventarc.ListChannelsRequest() + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 - assert args[0] == request_msg + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + client.delete_pipeline(request) -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_create_channel_empty_call_grpc_asyncio(): - client = EventarcAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_channel_), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') +@pytest.mark.asyncio +async def test_delete_pipeline_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = EventarcAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, ) - await client.create_channel(request=None) - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = eventarc.CreateChannelRequest() + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() - assert args[0] == request_msg + # Ensure method has been cached + assert client._client._transport.delete_pipeline in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.delete_pipeline] = mock_rpc + request = {} + await client.delete_pipeline(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.delete_pipeline(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. @pytest.mark.asyncio -async def test_update_channel_empty_call_grpc_asyncio(): +async def test_delete_pipeline_async(transport: str = 'grpc_asyncio', request_type=eventarc.DeletePipelineRequest): client = EventarcAsyncClient( credentials=async_anonymous_credentials(), - transport="grpc_asyncio", + transport=transport, ) - # Mock the actual call, and fake the request. + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_channel), + type(client.transport.delete_pipeline), '__call__') as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name='operations/spam') ) - await client.update_channel(request=None) + response = await client.delete_pipeline(request) - # Establish that the underlying stub method was called. - call.assert_called() + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request_msg = eventarc.UpdateChannelRequest() + request = eventarc.DeletePipelineRequest() + assert args[0] == request - assert args[0] == request_msg + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. @pytest.mark.asyncio -async def test_delete_channel_empty_call_grpc_asyncio(): - client = EventarcAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", +async def test_delete_pipeline_async_from_dict(): + await test_delete_pipeline_async(request_type=dict) + +def test_delete_pipeline_field_headers(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), ) - # Mock the actual call, and fake the request. + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = eventarc.DeletePipelineRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_channel), + type(client.transport.delete_pipeline), '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.delete_channel(request=None) + call.return_value = operations_pb2.Operation(name='operations/op') + client.delete_pipeline(request) - # Establish that the underlying stub method was called. - call.assert_called() + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request_msg = eventarc.DeleteChannelRequest() + assert args[0] == request - assert args[0] == request_msg + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. @pytest.mark.asyncio -async def test_get_provider_empty_call_grpc_asyncio(): +async def test_delete_pipeline_field_headers_async(): client = EventarcAsyncClient( credentials=async_anonymous_credentials(), - transport="grpc_asyncio", ) - # Mock the actual call, and fake the request. + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = eventarc.DeletePipelineRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_provider), + type(client.transport.delete_pipeline), '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(discovery.Provider( + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.delete_pipeline(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_delete_pipeline_flattened(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_pipeline), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_pipeline( name='name_value', - display_name='display_name_value', - )) - await client.get_provider(request=None) + etag='etag_value', + ) - # Establish that the underlying stub method was called. - call.assert_called() + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request_msg = eventarc.GetProviderRequest() + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + arg = args[0].etag + mock_val = 'etag_value' + assert arg == mock_val - assert args[0] == request_msg +def test_delete_pipeline_flattened_error(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_pipeline( + eventarc.DeletePipelineRequest(), + name='name_value', + etag='etag_value', + ) -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
@pytest.mark.asyncio -async def test_list_providers_empty_call_grpc_asyncio(): +async def test_delete_pipeline_flattened_async(): client = EventarcAsyncClient( credentials=async_anonymous_credentials(), - transport="grpc_asyncio", ) - # Mock the actual call, and fake the request. + # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_providers), + type(client.transport.delete_pipeline), '__call__') as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(eventarc.ListProvidersResponse( - next_page_token='next_page_token_value', - unreachable=['unreachable_value'], - )) - await client.list_providers(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = eventarc.ListProvidersRequest() + call.return_value = operations_pb2.Operation(name='operations/op') - assert args[0] == request_msg + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_pipeline( + name='name_value', + etag='etag_value', + ) + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + arg = args[0].etag + mock_val = 'etag_value' + assert arg == mock_val -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. @pytest.mark.asyncio -async def test_get_channel_connection_empty_call_grpc_asyncio(): +async def test_delete_pipeline_flattened_error_async(): client = EventarcAsyncClient( credentials=async_anonymous_credentials(), - transport="grpc_asyncio", ) - # Mock the actual call, and fake the request. + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_pipeline( + eventarc.DeletePipelineRequest(), + name='name_value', + etag='etag_value', + ) + + +@pytest.mark.parametrize("request_type", [ + eventarc.GetGoogleApiSourceRequest, + dict, +]) +def test_get_google_api_source(request_type, transport: str = 'grpc'): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_channel_connection), + type(client.transport.get_google_api_source), '__call__') as call: # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(channel_connection.ChannelConnection( + call.return_value = google_api_source.GoogleApiSource( name='name_value', uid='uid_value', - channel='channel_value', - activation_token='activation_token_value', - )) - await client.get_channel_connection(request=None) + etag='etag_value', + display_name='display_name_value', + destination='destination_value', + crypto_key_name='crypto_key_name_value', + ) + response = client.get_google_api_source(request) - # Establish that the underlying stub method was called. + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = eventarc.GetGoogleApiSourceRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, google_api_source.GoogleApiSource) + assert response.name == 'name_value' + assert response.uid == 'uid_value' + assert response.etag == 'etag_value' + assert response.display_name == 'display_name_value' + assert response.destination == 'destination_value' + assert response.crypto_key_name == 'crypto_key_name_value' + + +def test_get_google_api_source_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = eventarc.GetGoogleApiSourceRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_google_api_source), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.get_google_api_source(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - request_msg = eventarc.GetChannelConnectionRequest() + assert args[0] == eventarc.GetGoogleApiSourceRequest( + name='name_value', + ) - assert args[0] == request_msg +def test_get_google_api_source_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + # Ensure method has been cached + assert client._transport.get_google_api_source in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.get_google_api_source] = mock_rpc + request = {} + client.get_google_api_source(request) + + # Establish that the underlying gRPC stub method was called. 
+        assert mock_rpc.call_count == 1
+
+        client.get_google_api_source(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
 
-# This test is a coverage failsafe to make sure that totally empty calls,
-# i.e. request == None and no flattened fields passed, work.
 @pytest.mark.asyncio
-async def test_list_channel_connections_empty_call_grpc_asyncio():
+async def test_get_google_api_source_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
+        client = EventarcAsyncClient(
+            credentials=async_anonymous_credentials(),
+            transport=transport,
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._client._transport.get_google_api_source in client._client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.AsyncMock()
+        mock_rpc.return_value = mock.Mock()
+        client._client._transport._wrapped_methods[client._client._transport.get_google_api_source] = mock_rpc
+
+        request = {}
+        await client.get_google_api_source(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert mock_rpc.call_count == 1
+
+        await client.get_google_api_source(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_get_google_api_source_async(transport: str = 'grpc_asyncio', request_type=eventarc.GetGoogleApiSourceRequest):
     client = EventarcAsyncClient(
         credentials=async_anonymous_credentials(),
-        transport="grpc_asyncio",
+        transport=transport,
     )
 
-    # Mock the actual call, and fake the request.
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(
-            type(client.transport.list_channel_connections),
+            type(client.transport.get_google_api_source),
            '__call__') as call:
         # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(eventarc.ListChannelConnectionsResponse(
-            next_page_token='next_page_token_value',
-            unreachable=['unreachable_value'],
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(google_api_source.GoogleApiSource(
+            name='name_value',
+            uid='uid_value',
+            etag='etag_value',
+            display_name='display_name_value',
+            destination='destination_value',
+            crypto_key_name='crypto_key_name_value',
         ))
-        await client.list_channel_connections(request=None)
+        response = await client.get_google_api_source(request)
 
-        # Establish that the underlying stub method was called.
-        call.assert_called()
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
         _, args, _ = call.mock_calls[0]
-        request_msg = eventarc.ListChannelConnectionsRequest()
+        request = eventarc.GetGoogleApiSourceRequest()
+        assert args[0] == request
 
-    assert args[0] == request_msg
+    # Establish that the response is the type that we expect.
+ assert isinstance(response, google_api_source.GoogleApiSource) + assert response.name == 'name_value' + assert response.uid == 'uid_value' + assert response.etag == 'etag_value' + assert response.display_name == 'display_name_value' + assert response.destination == 'destination_value' + assert response.crypto_key_name == 'crypto_key_name_value' -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. @pytest.mark.asyncio -async def test_create_channel_connection_empty_call_grpc_asyncio(): - client = EventarcAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", +async def test_get_google_api_source_async_from_dict(): + await test_get_google_api_source_async(request_type=dict) + +def test_get_google_api_source_field_headers(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), ) - # Mock the actual call, and fake the request. + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = eventarc.GetGoogleApiSourceRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_channel_connection), + type(client.transport.get_google_api_source), + '__call__') as call: + call.return_value = google_api_source.GoogleApiSource() + client.get_google_api_source(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_get_google_api_source_field_headers_async(): + client = EventarcAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = eventarc.GetGoogleApiSourceRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_google_api_source), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(google_api_source.GoogleApiSource()) + await client.get_google_api_source(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_get_google_api_source_flattened(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_google_api_source), '__call__') as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') + call.return_value = google_api_source.GoogleApiSource() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ client.get_google_api_source( + name='name_value', ) - await client.create_channel_connection(request=None) - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = eventarc.CreateChannelConnectionRequest() + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + + +def test_get_google_api_source_flattened_error(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_google_api_source( + eventarc.GetGoogleApiSourceRequest(), + name='name_value', + ) + +@pytest.mark.asyncio +async def test_get_google_api_source_flattened_async(): + client = EventarcAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_google_api_source), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = google_api_source.GoogleApiSource() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(google_api_source.GoogleApiSource()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_google_api_source( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_get_google_api_source_flattened_error_async(): + client = EventarcAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_google_api_source( + eventarc.GetGoogleApiSourceRequest(), + name='name_value', + ) + + +@pytest.mark.parametrize("request_type", [ + eventarc.ListGoogleApiSourcesRequest, + dict, +]) +def test_list_google_api_sources(request_type, transport: str = 'grpc'): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_google_api_sources), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = eventarc.ListGoogleApiSourcesResponse( + next_page_token='next_page_token_value', + unreachable=['unreachable_value'], + ) + response = client.list_google_api_sources(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = eventarc.ListGoogleApiSourcesRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListGoogleApiSourcesPager) + assert response.next_page_token == 'next_page_token_value' + assert response.unreachable == ['unreachable_value'] + + +def test_list_google_api_sources_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = eventarc.ListGoogleApiSourcesRequest( + parent='parent_value', + page_token='page_token_value', + order_by='order_by_value', + filter='filter_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_google_api_sources), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.list_google_api_sources(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == eventarc.ListGoogleApiSourcesRequest( + parent='parent_value', + page_token='page_token_value', + order_by='order_by_value', + filter='filter_value', + ) + +def test_list_google_api_sources_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_google_api_sources in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.list_google_api_sources] = mock_rpc + request = {} + client.list_google_api_sources(request) + + # Establish that the underlying gRPC stub method was called. 
+        assert mock_rpc.call_count == 1
+
+        client.list_google_api_sources(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_list_google_api_sources_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
+        client = EventarcAsyncClient(
+            credentials=async_anonymous_credentials(),
+            transport=transport,
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._client._transport.list_google_api_sources in client._client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.AsyncMock()
+        mock_rpc.return_value = mock.Mock()
+        client._client._transport._wrapped_methods[client._client._transport.list_google_api_sources] = mock_rpc
+
+        request = {}
+        await client.list_google_api_sources(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert mock_rpc.call_count == 1
+
+        await client.list_google_api_sources(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_list_google_api_sources_async(transport: str = 'grpc_asyncio', request_type=eventarc.ListGoogleApiSourcesRequest):
+    client = EventarcAsyncClient(
+        credentials=async_anonymous_credentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_google_api_sources),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(eventarc.ListGoogleApiSourcesResponse(
+            next_page_token='next_page_token_value',
+            unreachable=['unreachable_value'],
+        ))
+        response = await client.list_google_api_sources(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        request = eventarc.ListGoogleApiSourcesRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, pagers.ListGoogleApiSourcesAsyncPager)
+    assert response.next_page_token == 'next_page_token_value'
+    assert response.unreachable == ['unreachable_value']
+
+
+@pytest.mark.asyncio
+async def test_list_google_api_sources_async_from_dict():
+    await test_list_google_api_sources_async(request_type=dict)
+
+def test_list_google_api_sources_field_headers():
+    client = EventarcClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = eventarc.ListGoogleApiSourcesRequest()
+
+    request.parent = 'parent_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object( + type(client.transport.list_google_api_sources), + '__call__') as call: + call.return_value = eventarc.ListGoogleApiSourcesResponse() + client.list_google_api_sources(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_list_google_api_sources_field_headers_async(): + client = EventarcAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = eventarc.ListGoogleApiSourcesRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_google_api_sources), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(eventarc.ListGoogleApiSourcesResponse()) + await client.list_google_api_sources(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_list_google_api_sources_flattened(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_google_api_sources), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = eventarc.ListGoogleApiSourcesResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_google_api_sources( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + + +def test_list_google_api_sources_flattened_error(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_google_api_sources( + eventarc.ListGoogleApiSourcesRequest(), + parent='parent_value', + ) + +@pytest.mark.asyncio +async def test_list_google_api_sources_flattened_async(): + client = EventarcAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_google_api_sources), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = eventarc.ListGoogleApiSourcesResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(eventarc.ListGoogleApiSourcesResponse()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ response = await client.list_google_api_sources( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_list_google_api_sources_flattened_error_async(): + client = EventarcAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_google_api_sources( + eventarc.ListGoogleApiSourcesRequest(), + parent='parent_value', + ) + + +def test_list_google_api_sources_pager(transport_name: str = "grpc"): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_google_api_sources), + '__call__') as call: + # Set the response to a series of pages. + call.side_effect = ( + eventarc.ListGoogleApiSourcesResponse( + google_api_sources=[ + google_api_source.GoogleApiSource(), + google_api_source.GoogleApiSource(), + google_api_source.GoogleApiSource(), + ], + next_page_token='abc', + ), + eventarc.ListGoogleApiSourcesResponse( + google_api_sources=[], + next_page_token='def', + ), + eventarc.ListGoogleApiSourcesResponse( + google_api_sources=[ + google_api_source.GoogleApiSource(), + ], + next_page_token='ghi', + ), + eventarc.ListGoogleApiSourcesResponse( + google_api_sources=[ + google_api_source.GoogleApiSource(), + google_api_source.GoogleApiSource(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ('parent', ''), + )), + ) + pager = client.list_google_api_sources(request={}, retry=retry, timeout=timeout) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, google_api_source.GoogleApiSource) + for i in results) +def test_list_google_api_sources_pages(transport_name: str = "grpc"): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_google_api_sources), + '__call__') as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + eventarc.ListGoogleApiSourcesResponse( + google_api_sources=[ + google_api_source.GoogleApiSource(), + google_api_source.GoogleApiSource(), + google_api_source.GoogleApiSource(), + ], + next_page_token='abc', + ), + eventarc.ListGoogleApiSourcesResponse( + google_api_sources=[], + next_page_token='def', + ), + eventarc.ListGoogleApiSourcesResponse( + google_api_sources=[ + google_api_source.GoogleApiSource(), + ], + next_page_token='ghi', + ), + eventarc.ListGoogleApiSourcesResponse( + google_api_sources=[ + google_api_source.GoogleApiSource(), + google_api_source.GoogleApiSource(), + ], + ), + RuntimeError, + ) + pages = list(client.list_google_api_sources(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.asyncio +async def test_list_google_api_sources_async_pager(): + client = EventarcAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_google_api_sources), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. + call.side_effect = ( + eventarc.ListGoogleApiSourcesResponse( + google_api_sources=[ + google_api_source.GoogleApiSource(), + google_api_source.GoogleApiSource(), + google_api_source.GoogleApiSource(), + ], + next_page_token='abc', + ), + eventarc.ListGoogleApiSourcesResponse( + google_api_sources=[], + next_page_token='def', + ), + eventarc.ListGoogleApiSourcesResponse( + google_api_sources=[ + google_api_source.GoogleApiSource(), + ], + next_page_token='ghi', + ), + eventarc.ListGoogleApiSourcesResponse( + google_api_sources=[ + google_api_source.GoogleApiSource(), + google_api_source.GoogleApiSource(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_google_api_sources(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, google_api_source.GoogleApiSource) + for i in responses) + + +@pytest.mark.asyncio +async def test_list_google_api_sources_async_pages(): + client = EventarcAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_google_api_sources), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + eventarc.ListGoogleApiSourcesResponse( + google_api_sources=[ + google_api_source.GoogleApiSource(), + google_api_source.GoogleApiSource(), + google_api_source.GoogleApiSource(), + ], + next_page_token='abc', + ), + eventarc.ListGoogleApiSourcesResponse( + google_api_sources=[], + next_page_token='def', + ), + eventarc.ListGoogleApiSourcesResponse( + google_api_sources=[ + google_api_source.GoogleApiSource(), + ], + next_page_token='ghi', + ), + eventarc.ListGoogleApiSourcesResponse( + google_api_sources=[ + google_api_source.GoogleApiSource(), + google_api_source.GoogleApiSource(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_google_api_sources(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.parametrize("request_type", [ + eventarc.CreateGoogleApiSourceRequest, + dict, +]) +def test_create_google_api_source(request_type, transport: str = 'grpc'): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_google_api_source), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/spam') + response = client.create_google_api_source(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = eventarc.CreateGoogleApiSourceRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_create_google_api_source_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = eventarc.CreateGoogleApiSourceRequest( + parent='parent_value', + google_api_source_id='google_api_source_id_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_google_api_source), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ client.create_google_api_source(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == eventarc.CreateGoogleApiSourceRequest( + parent='parent_value', + google_api_source_id='google_api_source_id_value', + ) + +def test_create_google_api_source_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_google_api_source in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.create_google_api_source] = mock_rpc + request = {} + client.create_google_api_source(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_google_api_source(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_create_google_api_source_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = EventarcAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.create_google_api_source in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.create_google_api_source] = mock_rpc + + request = {} + await client.create_google_api_source(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.create_google_api_source(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_create_google_api_source_async(transport: str = 'grpc_asyncio', request_type=eventarc.CreateGoogleApiSourceRequest): + client = EventarcAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_google_api_source), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.create_google_api_source(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = eventarc.CreateGoogleApiSourceRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_create_google_api_source_async_from_dict(): + await test_create_google_api_source_async(request_type=dict) + +def test_create_google_api_source_field_headers(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = eventarc.CreateGoogleApiSourceRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_google_api_source), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.create_google_api_source(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_create_google_api_source_field_headers_async(): + client = EventarcAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = eventarc.CreateGoogleApiSourceRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_google_api_source), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.create_google_api_source(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_create_google_api_source_flattened(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_google_api_source), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ client.create_google_api_source( + parent='parent_value', + google_api_source=gce_google_api_source.GoogleApiSource(name='name_value'), + google_api_source_id='google_api_source_id_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + arg = args[0].google_api_source + mock_val = gce_google_api_source.GoogleApiSource(name='name_value') + assert arg == mock_val + arg = args[0].google_api_source_id + mock_val = 'google_api_source_id_value' + assert arg == mock_val + + +def test_create_google_api_source_flattened_error(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_google_api_source( + eventarc.CreateGoogleApiSourceRequest(), + parent='parent_value', + google_api_source=gce_google_api_source.GoogleApiSource(name='name_value'), + google_api_source_id='google_api_source_id_value', + ) + +@pytest.mark.asyncio +async def test_create_google_api_source_flattened_async(): + client = EventarcAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_google_api_source), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_google_api_source( + parent='parent_value', + google_api_source=gce_google_api_source.GoogleApiSource(name='name_value'), + google_api_source_id='google_api_source_id_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + arg = args[0].google_api_source + mock_val = gce_google_api_source.GoogleApiSource(name='name_value') + assert arg == mock_val + arg = args[0].google_api_source_id + mock_val = 'google_api_source_id_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_create_google_api_source_flattened_error_async(): + client = EventarcAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_google_api_source( + eventarc.CreateGoogleApiSourceRequest(), + parent='parent_value', + google_api_source=gce_google_api_source.GoogleApiSource(name='name_value'), + google_api_source_id='google_api_source_id_value', + ) + + +@pytest.mark.parametrize("request_type", [ + eventarc.UpdateGoogleApiSourceRequest, + dict, +]) +def test_update_google_api_source(request_type, transport: str = 'grpc'): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_google_api_source), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/spam') + response = client.update_google_api_source(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = eventarc.UpdateGoogleApiSourceRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_update_google_api_source_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = eventarc.UpdateGoogleApiSourceRequest( + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_google_api_source), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.update_google_api_source(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == eventarc.UpdateGoogleApiSourceRequest( + ) + +def test_update_google_api_source_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_google_api_source in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.update_google_api_source] = mock_rpc + request = {} + client.update_google_api_source(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_google_api_source(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_update_google_api_source_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = EventarcAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.update_google_api_source in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.update_google_api_source] = mock_rpc + + request = {} + await client.update_google_api_source(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.update_google_api_source(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_update_google_api_source_async(transport: str = 'grpc_asyncio', request_type=eventarc.UpdateGoogleApiSourceRequest): + client = EventarcAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_google_api_source), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.update_google_api_source(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = eventarc.UpdateGoogleApiSourceRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_update_google_api_source_async_from_dict(): + await test_update_google_api_source_async(request_type=dict) + +def test_update_google_api_source_field_headers(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = eventarc.UpdateGoogleApiSourceRequest() + + request.google_api_source.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.update_google_api_source), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.update_google_api_source(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'google_api_source.name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_update_google_api_source_field_headers_async(): + client = EventarcAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = eventarc.UpdateGoogleApiSourceRequest() + + request.google_api_source.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_google_api_source), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.update_google_api_source(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'google_api_source.name=name_value', + ) in kw['metadata'] + + +def test_update_google_api_source_flattened(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_google_api_source), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_google_api_source( + google_api_source=gce_google_api_source.GoogleApiSource(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].google_api_source + mock_val = gce_google_api_source.GoogleApiSource(name='name_value') + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) + assert arg == mock_val + + +def test_update_google_api_source_flattened_error(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_google_api_source( + eventarc.UpdateGoogleApiSourceRequest(), + google_api_source=gce_google_api_source.GoogleApiSource(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + +@pytest.mark.asyncio +async def test_update_google_api_source_flattened_async(): + client = EventarcAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.update_google_api_source), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_google_api_source( + google_api_source=gce_google_api_source.GoogleApiSource(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].google_api_source + mock_val = gce_google_api_source.GoogleApiSource(name='name_value') + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) + assert arg == mock_val + +@pytest.mark.asyncio +async def test_update_google_api_source_flattened_error_async(): + client = EventarcAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_google_api_source( + eventarc.UpdateGoogleApiSourceRequest(), + google_api_source=gce_google_api_source.GoogleApiSource(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + +@pytest.mark.parametrize("request_type", [ + eventarc.DeleteGoogleApiSourceRequest, + dict, +]) +def test_delete_google_api_source(request_type, transport: str = 'grpc'): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_google_api_source), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/spam') + response = client.delete_google_api_source(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = eventarc.DeleteGoogleApiSourceRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_delete_google_api_source_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = eventarc.DeleteGoogleApiSourceRequest( + name='name_value', + etag='etag_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_google_api_source), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.delete_google_api_source(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == eventarc.DeleteGoogleApiSourceRequest( + name='name_value', + etag='etag_value', + ) + +def test_delete_google_api_source_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_google_api_source in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.delete_google_api_source] = mock_rpc + request = {} + client.delete_google_api_source(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_google_api_source(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_delete_google_api_source_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = EventarcAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.delete_google_api_source in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.delete_google_api_source] = mock_rpc + + request = {} + await client.delete_google_api_source(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.delete_google_api_source(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_delete_google_api_source_async(transport: str = 'grpc_asyncio', request_type=eventarc.DeleteGoogleApiSourceRequest): + client = EventarcAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_google_api_source), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.delete_google_api_source(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = eventarc.DeleteGoogleApiSourceRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_delete_google_api_source_async_from_dict(): + await test_delete_google_api_source_async(request_type=dict) + +def test_delete_google_api_source_field_headers(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = eventarc.DeleteGoogleApiSourceRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_google_api_source), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.delete_google_api_source(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_delete_google_api_source_field_headers_async(): + client = EventarcAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = eventarc.DeleteGoogleApiSourceRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_google_api_source), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.delete_google_api_source(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_delete_google_api_source_flattened(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_google_api_source), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_google_api_source( + name='name_value', + etag='etag_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + arg = args[0].etag + mock_val = 'etag_value' + assert arg == mock_val + + +def test_delete_google_api_source_flattened_error(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_google_api_source( + eventarc.DeleteGoogleApiSourceRequest(), + name='name_value', + etag='etag_value', + ) + +@pytest.mark.asyncio +async def test_delete_google_api_source_flattened_async(): + client = EventarcAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_google_api_source), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_google_api_source( + name='name_value', + etag='etag_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + arg = args[0].etag + mock_val = 'etag_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_delete_google_api_source_flattened_error_async(): + client = EventarcAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.delete_google_api_source( + eventarc.DeleteGoogleApiSourceRequest(), + name='name_value', + etag='etag_value', + ) + + +def test_get_trigger_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_trigger in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.get_trigger] = mock_rpc + + request = {} + client.get_trigger(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_trigger(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_trigger_rest_required_fields(request_type=eventarc.GetTriggerRequest): + transport_class = transports.EventarcRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_trigger._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_trigger._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = trigger.Trigger() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = trigger.Trigger.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_trigger(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_trigger_rest_unset_required_fields(): + transport = transports.EventarcRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get_trigger._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", ))) + + +def test_get_trigger_rest_flattened(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = trigger.Trigger() + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'projects/sample1/locations/sample2/triggers/sample3'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = trigger.Trigger.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.get_trigger(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{name=projects/*/locations/*/triggers/*}" % client.transport._host, args[1]) + + +def test_get_trigger_rest_flattened_error(transport: str = 'rest'): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_trigger( + eventarc.GetTriggerRequest(), + name='name_value', + ) + + +def test_list_triggers_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_triggers in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.list_triggers] = mock_rpc + + request = {} + client.list_triggers(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_triggers(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_triggers_rest_required_fields(request_type=eventarc.ListTriggersRequest): + transport_class = transports.EventarcRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_triggers._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = 'parent_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_triggers._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("filter", "order_by", "page_size", "page_token", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = eventarc.ListTriggersResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = eventarc.ListTriggersResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.list_triggers(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_list_triggers_rest_unset_required_fields(): + transport = transports.EventarcRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.list_triggers._get_unset_required_fields({}) + assert set(unset_fields) == (set(("filter", "orderBy", "pageSize", "pageToken", )) & set(("parent", ))) + + +def test_list_triggers_rest_flattened(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = eventarc.ListTriggersResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'parent': 'projects/sample1/locations/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = eventarc.ListTriggersResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.list_triggers(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/triggers" % client.transport._host, args[1]) + + +def test_list_triggers_rest_flattened_error(transport: str = 'rest'): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_triggers( + eventarc.ListTriggersRequest(), + parent='parent_value', + ) + + +def test_list_triggers_rest_pager(transport: str = 'rest'): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + eventarc.ListTriggersResponse( + triggers=[ + trigger.Trigger(), + trigger.Trigger(), + trigger.Trigger(), + ], + next_page_token='abc', + ), + eventarc.ListTriggersResponse( + triggers=[], + next_page_token='def', + ), + eventarc.ListTriggersResponse( + triggers=[ + trigger.Trigger(), + ], + next_page_token='ghi', + ), + eventarc.ListTriggersResponse( + triggers=[ + trigger.Trigger(), + trigger.Trigger(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(eventarc.ListTriggersResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'parent': 'projects/sample1/locations/sample2'} + + pager = client.list_triggers(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, trigger.Trigger) + for i in results) + + pages = list(client.list_triggers(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +def test_create_trigger_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_trigger in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.create_trigger] = mock_rpc + + request = {} + client.create_trigger(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_trigger(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_trigger_rest_required_fields(request_type=eventarc.CreateTriggerRequest): + transport_class = transports.EventarcRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["trigger_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + assert "triggerId" not in jsonified_request + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_trigger._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "triggerId" in jsonified_request + assert jsonified_request["triggerId"] == request_init["trigger_id"] + + jsonified_request["parent"] = 'parent_value' + jsonified_request["triggerId"] = 'trigger_id_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_trigger._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("trigger_id", "validate_only", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + assert "triggerId" in jsonified_request + assert jsonified_request["triggerId"] == 'trigger_id_value' + + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.create_trigger(request) + + expected_params = [ + ( + "triggerId", + "", + ), + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_create_trigger_rest_unset_required_fields(): + transport = transports.EventarcRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.create_trigger._get_unset_required_fields({}) + assert set(unset_fields) == (set(("triggerId", "validateOnly", )) & set(("parent", "trigger", "triggerId", ))) + + +def test_create_trigger_rest_flattened(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # get arguments that satisfy an http rule for this method + sample_request = {'parent': 'projects/sample1/locations/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + trigger=gce_trigger.Trigger(name='name_value'), + trigger_id='trigger_id_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.create_trigger(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/triggers" % client.transport._host, args[1]) + + +def test_create_trigger_rest_flattened_error(transport: str = 'rest'): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_trigger( + eventarc.CreateTriggerRequest(), + parent='parent_value', + trigger=gce_trigger.Trigger(name='name_value'), + trigger_id='trigger_id_value', + ) + + +def test_update_trigger_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_trigger in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.update_trigger] = mock_rpc + + request = {} + client.update_trigger(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_trigger(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_update_trigger_rest_flattened(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # get arguments that satisfy an http rule for this method + sample_request = {'trigger': {'name': 'projects/sample1/locations/sample2/triggers/sample3'}} + + # get truthy value for each flattened field + mock_args = dict( + trigger=gce_trigger.Trigger(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + allow_missing=True, + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.update_trigger(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{trigger.name=projects/*/locations/*/triggers/*}" % client.transport._host, args[1]) + + +def test_update_trigger_rest_flattened_error(transport: str = 'rest'): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_trigger( + eventarc.UpdateTriggerRequest(), + trigger=gce_trigger.Trigger(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + allow_missing=True, + ) + + +def test_delete_trigger_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_trigger in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.delete_trigger] = mock_rpc + + request = {} + client.delete_trigger(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_trigger(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_delete_trigger_rest_required_fields(request_type=eventarc.DeleteTriggerRequest): + transport_class = transports.EventarcRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_trigger._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_trigger._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("allow_missing", "etag", "validate_only", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.delete_trigger(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_trigger_rest_unset_required_fields(): + transport = transports.EventarcRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete_trigger._get_unset_required_fields({}) + assert set(unset_fields) == (set(("allowMissing", "etag", "validateOnly", )) & set(("name", ))) + + +def test_delete_trigger_rest_flattened(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'projects/sample1/locations/sample2/triggers/sample3'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + allow_missing=True, + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.delete_trigger(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{name=projects/*/locations/*/triggers/*}" % client.transport._host, args[1]) + + +def test_delete_trigger_rest_flattened_error(transport: str = 'rest'): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_trigger( + eventarc.DeleteTriggerRequest(), + name='name_value', + allow_missing=True, + ) + + +def test_get_channel_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_channel in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.get_channel] = mock_rpc + + request = {} + client.get_channel(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_channel(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_channel_rest_required_fields(request_type=eventarc.GetChannelRequest): + transport_class = transports.EventarcRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_channel._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_channel._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = channel.Channel() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = channel.Channel.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_channel(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_channel_rest_unset_required_fields(): + transport = transports.EventarcRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get_channel._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", ))) + + +def test_get_channel_rest_flattened(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = channel.Channel() + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'projects/sample1/locations/sample2/channels/sample3'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = channel.Channel.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.get_channel(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{name=projects/*/locations/*/channels/*}" % client.transport._host, args[1]) + + +def test_get_channel_rest_flattened_error(transport: str = 'rest'): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_channel( + eventarc.GetChannelRequest(), + name='name_value', + ) + + +def test_list_channels_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_channels in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.list_channels] = mock_rpc + + request = {} + client.list_channels(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_channels(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_channels_rest_required_fields(request_type=eventarc.ListChannelsRequest): + transport_class = transports.EventarcRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_channels._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = 'parent_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_channels._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("order_by", "page_size", "page_token", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = eventarc.ListChannelsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = eventarc.ListChannelsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.list_channels(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_list_channels_rest_unset_required_fields(): + transport = transports.EventarcRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.list_channels._get_unset_required_fields({}) + assert set(unset_fields) == (set(("orderBy", "pageSize", "pageToken", )) & set(("parent", ))) + + +def test_list_channels_rest_flattened(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = eventarc.ListChannelsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'parent': 'projects/sample1/locations/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = eventarc.ListChannelsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.list_channels(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/channels" % client.transport._host, args[1]) + + +def test_list_channels_rest_flattened_error(transport: str = 'rest'): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_channels( + eventarc.ListChannelsRequest(), + parent='parent_value', + ) + + +def test_list_channels_rest_pager(transport: str = 'rest'): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + eventarc.ListChannelsResponse( + channels=[ + channel.Channel(), + channel.Channel(), + channel.Channel(), + ], + next_page_token='abc', + ), + eventarc.ListChannelsResponse( + channels=[], + next_page_token='def', + ), + eventarc.ListChannelsResponse( + channels=[ + channel.Channel(), + ], + next_page_token='ghi', + ), + eventarc.ListChannelsResponse( + channels=[ + channel.Channel(), + channel.Channel(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(eventarc.ListChannelsResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'parent': 'projects/sample1/locations/sample2'} + + pager = client.list_channels(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, channel.Channel) + for i in results) + + pages = list(client.list_channels(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +def test_create_channel_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_channel_ in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.create_channel_] = mock_rpc + + request = {} + client.create_channel(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_channel(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_channel_rest_required_fields(request_type=eventarc.CreateChannelRequest): + transport_class = transports.EventarcRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["channel_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + assert "channelId" not in jsonified_request + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_channel_._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "channelId" in jsonified_request + assert jsonified_request["channelId"] == request_init["channel_id"] + + jsonified_request["parent"] = 'parent_value' + jsonified_request["channelId"] = 'channel_id_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_channel_._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("channel_id", "validate_only", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + assert "channelId" in jsonified_request + assert jsonified_request["channelId"] == 'channel_id_value' + + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
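+            # Unlike the GET cases above, this POST carries a request body, so
+            # the stubbed transcode result below also sets 'body'. channel_id
+            # is a required query parameter, which is why expected_params
+            # further down contains ('channelId', '') even though the value is
+            # left at its default.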
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.create_channel(request) + + expected_params = [ + ( + "channelId", + "", + ), + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_create_channel_rest_unset_required_fields(): + transport = transports.EventarcRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.create_channel_._get_unset_required_fields({}) + assert set(unset_fields) == (set(("channelId", "validateOnly", )) & set(("parent", "channel", "channelId", ))) + + +def test_create_channel_rest_flattened(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # get arguments that satisfy an http rule for this method + sample_request = {'parent': 'projects/sample1/locations/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + channel=gce_channel.Channel(name='name_value'), + channel_id='channel_id_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.create_channel(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/channels" % client.transport._host, args[1]) + + +def test_create_channel_rest_flattened_error(transport: str = 'rest'): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_channel( + eventarc.CreateChannelRequest(), + parent='parent_value', + channel=gce_channel.Channel(name='name_value'), + channel_id='channel_id_value', + ) + + +def test_update_channel_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_channel in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.update_channel] = mock_rpc + + request = {} + client.update_channel(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_channel(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_update_channel_rest_flattened(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # get arguments that satisfy an http rule for this method + sample_request = {'channel': {'name': 'projects/sample1/locations/sample2/channels/sample3'}} + + # get truthy value for each flattened field + mock_args = dict( + channel=gce_channel.Channel(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.update_channel(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{channel.name=projects/*/locations/*/channels/*}" % client.transport._host, args[1]) + + +def test_update_channel_rest_flattened_error(transport: str = 'rest'): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_channel( + eventarc.UpdateChannelRequest(), + channel=gce_channel.Channel(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + +def test_delete_channel_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_channel in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.delete_channel] = mock_rpc + + request = {} + client.delete_channel(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_channel(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_delete_channel_rest_required_fields(request_type=eventarc.DeleteChannelRequest): + transport_class = transports.EventarcRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_channel._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_channel._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("validate_only", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
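+            # DELETE requests for this rpc have no body, and per the
+            # assertions above validate_only is the only additional query
+            # parameter it may send; it is unset here, so expected_params
+            # below stays empty.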
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.delete_channel(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_channel_rest_unset_required_fields(): + transport = transports.EventarcRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete_channel._get_unset_required_fields({}) + assert set(unset_fields) == (set(("validateOnly", )) & set(("name", ))) + + +def test_delete_channel_rest_flattened(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'projects/sample1/locations/sample2/channels/sample3'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.delete_channel(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{name=projects/*/locations/*/channels/*}" % client.transport._host, args[1]) + + +def test_delete_channel_rest_flattened_error(transport: str = 'rest'): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_channel( + eventarc.DeleteChannelRequest(), + name='name_value', + ) + + +def test_get_provider_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_provider in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
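+        # Swapping the cached wrapped method for a Mock lets the test count
+        # invocations without issuing any HTTP traffic; the second call below
+        # must reuse this same cached entry instead of re-wrapping the rpc.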
+ client._transport._wrapped_methods[client._transport.get_provider] = mock_rpc + + request = {} + client.get_provider(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_provider(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_provider_rest_required_fields(request_type=eventarc.GetProviderRequest): + transport_class = transports.EventarcRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_provider._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_provider._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = discovery.Provider() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = discovery.Provider.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_provider(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_provider_rest_unset_required_fields(): + transport = transports.EventarcRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get_provider._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", ))) + + +def test_get_provider_rest_flattened(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
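+    # The flattened call below passes name= directly; the client assembles a
+    # GetProviderRequest from it, and the final assertion validates the URL
+    # handed to session.request (args[1]) against the http rule template
+    # '{name=projects/*/locations/*/providers/*}', so the sample name should
+    # expand to roughly .../v1/projects/sample1/locations/sample2/providers/sample3.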
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = discovery.Provider() + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'projects/sample1/locations/sample2/providers/sample3'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = discovery.Provider.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.get_provider(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{name=projects/*/locations/*/providers/*}" % client.transport._host, args[1]) + + +def test_get_provider_rest_flattened_error(transport: str = 'rest'): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_provider( + eventarc.GetProviderRequest(), + name='name_value', + ) + + +def test_list_providers_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_providers in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.list_providers] = mock_rpc + + request = {} + client.list_providers(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_providers(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_providers_rest_required_fields(request_type=eventarc.ListProvidersRequest): + transport_class = transports.EventarcRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_providers._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = 'parent_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_providers._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("filter", "order_by", "page_size", "page_token", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = eventarc.ListProvidersResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = eventarc.ListProvidersResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.list_providers(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_list_providers_rest_unset_required_fields(): + transport = transports.EventarcRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.list_providers._get_unset_required_fields({}) + assert set(unset_fields) == (set(("filter", "orderBy", "pageSize", "pageToken", )) & set(("parent", ))) + + +def test_list_providers_rest_flattened(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = eventarc.ListProvidersResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'parent': 'projects/sample1/locations/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = eventarc.ListProvidersResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.list_providers(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/providers" % client.transport._host, args[1]) + + +def test_list_providers_rest_flattened_error(transport: str = 'rest'): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_providers( + eventarc.ListProvidersRequest(), + parent='parent_value', + ) + + +def test_list_providers_rest_pager(transport: str = 'rest'): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + eventarc.ListProvidersResponse( + providers=[ + discovery.Provider(), + discovery.Provider(), + discovery.Provider(), + ], + next_page_token='abc', + ), + eventarc.ListProvidersResponse( + providers=[], + next_page_token='def', + ), + eventarc.ListProvidersResponse( + providers=[ + discovery.Provider(), + ], + next_page_token='ghi', + ), + eventarc.ListProvidersResponse( + providers=[ + discovery.Provider(), + discovery.Provider(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(eventarc.ListProvidersResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'parent': 'projects/sample1/locations/sample2'} + + pager = client.list_providers(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, discovery.Provider) + for i in results) + + pages = list(client.list_providers(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +def test_get_channel_connection_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_channel_connection in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.get_channel_connection] = mock_rpc + + request = {} + client.get_channel_connection(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_channel_connection(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_channel_connection_rest_required_fields(request_type=eventarc.GetChannelConnectionRequest): + transport_class = transports.EventarcRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_channel_connection._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_channel_connection._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = channel_connection.ChannelConnection() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = channel_connection.ChannelConnection.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_channel_connection(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_channel_connection_rest_unset_required_fields(): + transport = transports.EventarcRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get_channel_connection._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", ))) + + +def test_get_channel_connection_rest_flattened(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = channel_connection.ChannelConnection() + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'projects/sample1/locations/sample2/channelConnections/sample3'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = channel_connection.ChannelConnection.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.get_channel_connection(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{name=projects/*/locations/*/channelConnections/*}" % client.transport._host, args[1]) + + +def test_get_channel_connection_rest_flattened_error(transport: str = 'rest'): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_channel_connection( + eventarc.GetChannelConnectionRequest(), + name='name_value', + ) + + +def test_list_channel_connections_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_channel_connections in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.list_channel_connections] = mock_rpc + + request = {} + client.list_channel_connections(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_channel_connections(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_channel_connections_rest_required_fields(request_type=eventarc.ListChannelConnectionsRequest): + transport_class = transports.EventarcRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_channel_connections._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = 'parent_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_channel_connections._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("page_size", "page_token", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = eventarc.ListChannelConnectionsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
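+            # With an empty-bodied GET and the transcode stub set up just
+            # below, the check at the end is only about which parameters the
+            # transport insists on sending: 'parent' is a path parameter and
+            # the paging fields are optional, so expected_params ends up empty.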
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = eventarc.ListChannelConnectionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.list_channel_connections(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_list_channel_connections_rest_unset_required_fields(): + transport = transports.EventarcRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.list_channel_connections._get_unset_required_fields({}) + assert set(unset_fields) == (set(("pageSize", "pageToken", )) & set(("parent", ))) + + +def test_list_channel_connections_rest_flattened(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = eventarc.ListChannelConnectionsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'parent': 'projects/sample1/locations/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = eventarc.ListChannelConnectionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.list_channel_connections(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/channelConnections" % client.transport._host, args[1]) + + +def test_list_channel_connections_rest_flattened_error(transport: str = 'rest'): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_channel_connections( + eventarc.ListChannelConnectionsRequest(), + parent='parent_value', + ) + + +def test_list_channel_connections_rest_pager(transport: str = 'rest'): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + eventarc.ListChannelConnectionsResponse( + channel_connections=[ + channel_connection.ChannelConnection(), + channel_connection.ChannelConnection(), + channel_connection.ChannelConnection(), + ], + next_page_token='abc', + ), + eventarc.ListChannelConnectionsResponse( + channel_connections=[], + next_page_token='def', + ), + eventarc.ListChannelConnectionsResponse( + channel_connections=[ + channel_connection.ChannelConnection(), + ], + next_page_token='ghi', + ), + eventarc.ListChannelConnectionsResponse( + channel_connections=[ + channel_connection.ChannelConnection(), + channel_connection.ChannelConnection(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(eventarc.ListChannelConnectionsResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'parent': 'projects/sample1/locations/sample2'} + + pager = client.list_channel_connections(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, channel_connection.ChannelConnection) + for i in results) + + pages = list(client.list_channel_connections(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +def test_create_channel_connection_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_channel_connection in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.create_channel_connection] = mock_rpc + + request = {} + client.create_channel_connection(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_channel_connection(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_channel_connection_rest_required_fields(request_type=eventarc.CreateChannelConnectionRequest): + transport_class = transports.EventarcRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["channel_connection_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + assert "channelConnectionId" not in jsonified_request + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_channel_connection._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "channelConnectionId" in jsonified_request + assert jsonified_request["channelConnectionId"] == request_init["channel_connection_id"] + + jsonified_request["parent"] = 'parent_value' + jsonified_request["channelConnectionId"] = 'channel_connection_id_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_channel_connection._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("channel_connection_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + assert "channelConnectionId" in jsonified_request + assert jsonified_request["channelConnectionId"] == 'channel_connection_id_value' + + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.create_channel_connection(request) + + expected_params = [ + ( + "channelConnectionId", + "", + ), + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_create_channel_connection_rest_unset_required_fields(): + transport = transports.EventarcRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.create_channel_connection._get_unset_required_fields({}) + assert set(unset_fields) == (set(("channelConnectionId", )) & set(("parent", "channelConnection", "channelConnectionId", ))) + + +def test_create_channel_connection_rest_flattened(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # get arguments that satisfy an http rule for this method + sample_request = {'parent': 'projects/sample1/locations/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + channel_connection=gce_channel_connection.ChannelConnection(name='name_value'), + channel_connection_id='channel_connection_id_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.create_channel_connection(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/channelConnections" % client.transport._host, args[1]) + + +def test_create_channel_connection_rest_flattened_error(transport: str = 'rest'): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_channel_connection( + eventarc.CreateChannelConnectionRequest(), + parent='parent_value', + channel_connection=gce_channel_connection.ChannelConnection(name='name_value'), + channel_connection_id='channel_connection_id_value', + ) + + +def test_delete_channel_connection_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_channel_connection in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.delete_channel_connection] = mock_rpc + + request = {} + client.delete_channel_connection(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_channel_connection(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_delete_channel_connection_rest_required_fields(request_type=eventarc.DeleteChannelConnectionRequest): + transport_class = transports.EventarcRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_channel_connection._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_channel_connection._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.delete_channel_connection(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_channel_connection_rest_unset_required_fields(): + transport = transports.EventarcRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete_channel_connection._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", ))) + + +def test_delete_channel_connection_rest_flattened(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'projects/sample1/locations/sample2/channelConnections/sample3'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.delete_channel_connection(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{name=projects/*/locations/*/channelConnections/*}" % client.transport._host, args[1]) + + +def test_delete_channel_connection_rest_flattened_error(transport: str = 'rest'): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_channel_connection( + eventarc.DeleteChannelConnectionRequest(), + name='name_value', + ) + + +def test_get_google_channel_config_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_google_channel_config in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.get_google_channel_config] = mock_rpc + + request = {} + client.get_google_channel_config(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_google_channel_config(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_google_channel_config_rest_required_fields(request_type=eventarc.GetGoogleChannelConfigRequest): + transport_class = transports.EventarcRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_google_channel_config._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_google_channel_config._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = google_channel_config.GoogleChannelConfig() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = google_channel_config.GoogleChannelConfig.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_google_channel_config(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_google_channel_config_rest_unset_required_fields(): + transport = transports.EventarcRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get_google_channel_config._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", ))) + + +def test_get_google_channel_config_rest_flattened(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = google_channel_config.GoogleChannelConfig() + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'projects/sample1/locations/sample2/googleChannelConfig'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = google_channel_config.GoogleChannelConfig.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.get_google_channel_config(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{name=projects/*/locations/*/googleChannelConfig}" % client.transport._host, args[1]) + + +def test_get_google_channel_config_rest_flattened_error(transport: str = 'rest'): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_google_channel_config( + eventarc.GetGoogleChannelConfigRequest(), + name='name_value', + ) + + +def test_update_google_channel_config_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_google_channel_config in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.update_google_channel_config] = mock_rpc + + request = {} + client.update_google_channel_config(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.update_google_channel_config(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_update_google_channel_config_rest_required_fields(request_type=eventarc.UpdateGoogleChannelConfigRequest): + transport_class = transports.EventarcRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_google_channel_config._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_google_channel_config._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("update_mask", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = gce_google_channel_config.GoogleChannelConfig() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
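+ # Note: UpdateGoogleChannelConfig uses http PATCH with the resource in the
+ # request body, so the stubbed transcode result below also carries a 'body' entry.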
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "patch", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = gce_google_channel_config.GoogleChannelConfig.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.update_google_channel_config(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_update_google_channel_config_rest_unset_required_fields(): + transport = transports.EventarcRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.update_google_channel_config._get_unset_required_fields({}) + assert set(unset_fields) == (set(("updateMask", )) & set(("googleChannelConfig", ))) + + +def test_update_google_channel_config_rest_flattened(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = gce_google_channel_config.GoogleChannelConfig() + + # get arguments that satisfy an http rule for this method + sample_request = {'google_channel_config': {'name': 'projects/sample1/locations/sample2/googleChannelConfig'}} + + # get truthy value for each flattened field + mock_args = dict( + google_channel_config=gce_google_channel_config.GoogleChannelConfig(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = gce_google_channel_config.GoogleChannelConfig.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.update_google_channel_config(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{google_channel_config.name=projects/*/locations/*/googleChannelConfig}" % client.transport._host, args[1]) + + +def test_update_google_channel_config_rest_flattened_error(transport: str = 'rest'): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_google_channel_config( + eventarc.UpdateGoogleChannelConfigRequest(), + google_channel_config=gce_google_channel_config.GoogleChannelConfig(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + +def test_get_message_bus_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_message_bus in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.get_message_bus] = mock_rpc + + request = {} + client.get_message_bus(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_message_bus(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_message_bus_rest_required_fields(request_type=eventarc.GetMessageBusRequest): + transport_class = transports.EventarcRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_message_bus._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_message_bus._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = message_bus.MessageBus() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = message_bus.MessageBus.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_message_bus(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_message_bus_rest_unset_required_fields(): + transport = transports.EventarcRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get_message_bus._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", ))) + + +def test_get_message_bus_rest_flattened(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = message_bus.MessageBus() + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'projects/sample1/locations/sample2/messageBuses/sample3'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = message_bus.MessageBus.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.get_message_bus(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{name=projects/*/locations/*/messageBuses/*}" % client.transport._host, args[1]) + + +def test_get_message_bus_rest_flattened_error(transport: str = 'rest'): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_message_bus( + eventarc.GetMessageBusRequest(), + name='name_value', + ) + + +def test_list_message_buses_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_message_buses in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.list_message_buses] = mock_rpc + + request = {} + client.list_message_buses(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_message_buses(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_message_buses_rest_required_fields(request_type=eventarc.ListMessageBusesRequest): + transport_class = transports.EventarcRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_message_buses._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = 'parent_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_message_buses._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("filter", "order_by", "page_size", "page_token", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = eventarc.ListMessageBusesResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = eventarc.ListMessageBusesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.list_message_buses(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_list_message_buses_rest_unset_required_fields(): + transport = transports.EventarcRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.list_message_buses._get_unset_required_fields({}) + assert set(unset_fields) == (set(("filter", "orderBy", "pageSize", "pageToken", )) & set(("parent", ))) + + +def test_list_message_buses_rest_flattened(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = eventarc.ListMessageBusesResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'parent': 'projects/sample1/locations/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = eventarc.ListMessageBusesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.list_message_buses(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/messageBuses" % client.transport._host, args[1]) + + +def test_list_message_buses_rest_flattened_error(transport: str = 'rest'): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_message_buses( + eventarc.ListMessageBusesRequest(), + parent='parent_value', + ) + + +def test_list_message_buses_rest_pager(transport: str = 'rest'): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
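+ # The fake responses below serve four pages (3, 0, 1 and 2 message buses with
+ # next_page_token 'abc', 'def', 'ghi' and ''), duplicated so the items and the
+ # raw pages can each be iterated once.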
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + eventarc.ListMessageBusesResponse( + message_buses=[ + message_bus.MessageBus(), + message_bus.MessageBus(), + message_bus.MessageBus(), + ], + next_page_token='abc', + ), + eventarc.ListMessageBusesResponse( + message_buses=[], + next_page_token='def', + ), + eventarc.ListMessageBusesResponse( + message_buses=[ + message_bus.MessageBus(), + ], + next_page_token='ghi', + ), + eventarc.ListMessageBusesResponse( + message_buses=[ + message_bus.MessageBus(), + message_bus.MessageBus(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(eventarc.ListMessageBusesResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'parent': 'projects/sample1/locations/sample2'} + + pager = client.list_message_buses(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, message_bus.MessageBus) + for i in results) + + pages = list(client.list_message_buses(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +def test_list_message_bus_enrollments_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_message_bus_enrollments in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.list_message_bus_enrollments] = mock_rpc + + request = {} + client.list_message_bus_enrollments(request) + + # Establish that the underlying gRPC stub method was called. 
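+ # Note: with the REST transport this exercises the cached wrapped method
+ # installed above rather than an actual gRPC stub.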
+ assert mock_rpc.call_count == 1 + + client.list_message_bus_enrollments(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_message_bus_enrollments_rest_required_fields(request_type=eventarc.ListMessageBusEnrollmentsRequest): + transport_class = transports.EventarcRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_message_bus_enrollments._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = 'parent_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_message_bus_enrollments._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("page_size", "page_token", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = eventarc.ListMessageBusEnrollmentsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = eventarc.ListMessageBusEnrollmentsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.list_message_bus_enrollments(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_list_message_bus_enrollments_rest_unset_required_fields(): + transport = transports.EventarcRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.list_message_bus_enrollments._get_unset_required_fields({}) + assert set(unset_fields) == (set(("pageSize", "pageToken", )) & set(("parent", ))) + + +def test_list_message_bus_enrollments_rest_flattened(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = eventarc.ListMessageBusEnrollmentsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'parent': 'projects/sample1/locations/sample2/messageBuses/sample3'} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = eventarc.ListMessageBusEnrollmentsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.list_message_bus_enrollments(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{parent=projects/*/locations/*/messageBuses/*}:listEnrollments" % client.transport._host, args[1]) + + +def test_list_message_bus_enrollments_rest_flattened_error(transport: str = 'rest'): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_message_bus_enrollments( + eventarc.ListMessageBusEnrollmentsRequest(), + parent='parent_value', + ) + + +def test_list_message_bus_enrollments_rest_pager(transport: str = 'rest'): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
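+ # Note: ListMessageBusEnrollments pages contain plain resource-name strings
+ # rather than message objects, hence the isinstance(i, str) checks below.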
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + eventarc.ListMessageBusEnrollmentsResponse( + enrollments=[ + str(), + str(), + str(), + ], + next_page_token='abc', + ), + eventarc.ListMessageBusEnrollmentsResponse( + enrollments=[], + next_page_token='def', + ), + eventarc.ListMessageBusEnrollmentsResponse( + enrollments=[ + str(), + ], + next_page_token='ghi', + ), + eventarc.ListMessageBusEnrollmentsResponse( + enrollments=[ + str(), + str(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(eventarc.ListMessageBusEnrollmentsResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'parent': 'projects/sample1/locations/sample2/messageBuses/sample3'} + + pager = client.list_message_bus_enrollments(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, str) + for i in results) + + pages = list(client.list_message_bus_enrollments(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +def test_create_message_bus_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_message_bus in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.create_message_bus] = mock_rpc + + request = {} + client.create_message_bus(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_message_bus(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_message_bus_rest_required_fields(request_type=eventarc.CreateMessageBusRequest): + transport_class = transports.EventarcRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["message_bus_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + assert "messageBusId" not in jsonified_request + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_message_bus._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "messageBusId" in jsonified_request + assert jsonified_request["messageBusId"] == request_init["message_bus_id"] + + jsonified_request["parent"] = 'parent_value' + jsonified_request["messageBusId"] = 'message_bus_id_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_message_bus._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("message_bus_id", "validate_only", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + assert "messageBusId" in jsonified_request + assert jsonified_request["messageBusId"] == 'message_bus_id_value' + + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
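+ # Note: message_bus_id is a required query-string field; left at its default it
+ # is expected to surface as ("messageBusId", "") in the params asserted below.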
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.create_message_bus(request) + + expected_params = [ + ( + "messageBusId", + "", + ), + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_create_message_bus_rest_unset_required_fields(): + transport = transports.EventarcRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.create_message_bus._get_unset_required_fields({}) + assert set(unset_fields) == (set(("messageBusId", "validateOnly", )) & set(("parent", "messageBus", "messageBusId", ))) + + +def test_create_message_bus_rest_flattened(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # get arguments that satisfy an http rule for this method + sample_request = {'parent': 'projects/sample1/locations/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + message_bus=gce_message_bus.MessageBus(name='name_value'), + message_bus_id='message_bus_id_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.create_message_bus(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/messageBuses" % client.transport._host, args[1]) + + +def test_create_message_bus_rest_flattened_error(transport: str = 'rest'): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_message_bus( + eventarc.CreateMessageBusRequest(), + parent='parent_value', + message_bus=gce_message_bus.MessageBus(name='name_value'), + message_bus_id='message_bus_id_value', + ) + + +def test_update_message_bus_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_message_bus in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.update_message_bus] = mock_rpc + + request = {} + client.update_message_bus(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_message_bus(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_update_message_bus_rest_required_fields(request_type=eventarc.UpdateMessageBusRequest): + transport_class = transports.EventarcRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_message_bus._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_message_bus._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("allow_missing", "update_mask", "validate_only", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "patch", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.update_message_bus(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_update_message_bus_rest_unset_required_fields(): + transport = transports.EventarcRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.update_message_bus._get_unset_required_fields({}) + assert set(unset_fields) == (set(("allowMissing", "updateMask", "validateOnly", )) & set(("messageBus", ))) + + +def test_update_message_bus_rest_flattened(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # get arguments that satisfy an http rule for this method + sample_request = {'message_bus': {'name': 'projects/sample1/locations/sample2/messageBuses/sample3'}} + + # get truthy value for each flattened field + mock_args = dict( + message_bus=gce_message_bus.MessageBus(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.update_message_bus(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{message_bus.name=projects/*/locations/*/messageBuses/*}" % client.transport._host, args[1]) + + +def test_update_message_bus_rest_flattened_error(transport: str = 'rest'): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_message_bus( + eventarc.UpdateMessageBusRequest(), + message_bus=gce_message_bus.MessageBus(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + +def test_delete_message_bus_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_message_bus in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.delete_message_bus] = mock_rpc + + request = {} + client.delete_message_bus(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_message_bus(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_delete_message_bus_rest_required_fields(request_type=eventarc.DeleteMessageBusRequest): + transport_class = transports.EventarcRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_message_bus._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_message_bus._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("allow_missing", "etag", "validate_only", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.delete_message_bus(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_message_bus_rest_unset_required_fields(): + transport = transports.EventarcRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete_message_bus._get_unset_required_fields({}) + assert set(unset_fields) == (set(("allowMissing", "etag", "validateOnly", )) & set(("name", ))) + + +def test_delete_message_bus_rest_flattened(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'projects/sample1/locations/sample2/messageBuses/sample3'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + etag='etag_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.delete_message_bus(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{name=projects/*/locations/*/messageBuses/*}" % client.transport._host, args[1]) + + +def test_delete_message_bus_rest_flattened_error(transport: str = 'rest'): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_message_bus( + eventarc.DeleteMessageBusRequest(), + name='name_value', + etag='etag_value', + ) + + +def test_get_enrollment_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_enrollment in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.get_enrollment] = mock_rpc + + request = {} + client.get_enrollment(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_enrollment(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_enrollment_rest_required_fields(request_type=eventarc.GetEnrollmentRequest): + transport_class = transports.EventarcRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_enrollment._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_enrollment._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = enrollment.Enrollment() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = enrollment.Enrollment.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_enrollment(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_enrollment_rest_unset_required_fields(): + transport = transports.EventarcRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get_enrollment._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", ))) + + +def test_get_enrollment_rest_flattened(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = enrollment.Enrollment() + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'projects/sample1/locations/sample2/enrollments/sample3'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = enrollment.Enrollment.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.get_enrollment(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{name=projects/*/locations/*/enrollments/*}" % client.transport._host, args[1]) + + +def test_get_enrollment_rest_flattened_error(transport: str = 'rest'): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_enrollment( + eventarc.GetEnrollmentRequest(), + name='name_value', + ) + + +def test_list_enrollments_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_enrollments in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.list_enrollments] = mock_rpc + + request = {} + client.list_enrollments(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_enrollments(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_enrollments_rest_required_fields(request_type=eventarc.ListEnrollmentsRequest): + transport_class = transports.EventarcRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_enrollments._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = 'parent_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_enrollments._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("filter", "order_by", "page_size", "page_token", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = eventarc.ListEnrollmentsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = eventarc.ListEnrollmentsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.list_enrollments(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_list_enrollments_rest_unset_required_fields(): + transport = transports.EventarcRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.list_enrollments._get_unset_required_fields({}) + assert set(unset_fields) == (set(("filter", "orderBy", "pageSize", "pageToken", )) & set(("parent", ))) + + +def test_list_enrollments_rest_flattened(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = eventarc.ListEnrollmentsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'parent': 'projects/sample1/locations/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = eventarc.ListEnrollmentsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.list_enrollments(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/enrollments" % client.transport._host, args[1]) + + +def test_list_enrollments_rest_flattened_error(transport: str = 'rest'): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_enrollments( + eventarc.ListEnrollmentsRequest(), + parent='parent_value', + ) + + +def test_list_enrollments_rest_pager(transport: str = 'rest'): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + eventarc.ListEnrollmentsResponse( + enrollments=[ + enrollment.Enrollment(), + enrollment.Enrollment(), + enrollment.Enrollment(), + ], + next_page_token='abc', + ), + eventarc.ListEnrollmentsResponse( + enrollments=[], + next_page_token='def', + ), + eventarc.ListEnrollmentsResponse( + enrollments=[ + enrollment.Enrollment(), + ], + next_page_token='ghi', + ), + eventarc.ListEnrollmentsResponse( + enrollments=[ + enrollment.Enrollment(), + enrollment.Enrollment(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(eventarc.ListEnrollmentsResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'parent': 'projects/sample1/locations/sample2'} + + pager = client.list_enrollments(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, enrollment.Enrollment) + for i in results) + + pages = list(client.list_enrollments(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +def test_create_enrollment_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_enrollment in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.create_enrollment] = mock_rpc + + request = {} + client.create_enrollment(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_enrollment(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_enrollment_rest_required_fields(request_type=eventarc.CreateEnrollmentRequest): + transport_class = transports.EventarcRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["enrollment_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + assert "enrollmentId" not in jsonified_request + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_enrollment._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "enrollmentId" in jsonified_request + assert jsonified_request["enrollmentId"] == request_init["enrollment_id"] + + jsonified_request["parent"] = 'parent_value' + jsonified_request["enrollmentId"] = 'enrollment_id_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_enrollment._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("enrollment_id", "validate_only", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + assert "enrollmentId" in jsonified_request + assert jsonified_request["enrollmentId"] == 'enrollment_id_value' + + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.create_enrollment(request) + + expected_params = [ + ( + "enrollmentId", + "", + ), + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_create_enrollment_rest_unset_required_fields(): + transport = transports.EventarcRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.create_enrollment._get_unset_required_fields({}) + assert set(unset_fields) == (set(("enrollmentId", "validateOnly", )) & set(("parent", "enrollment", "enrollmentId", ))) + + +def test_create_enrollment_rest_flattened(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # get arguments that satisfy an http rule for this method + sample_request = {'parent': 'projects/sample1/locations/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + enrollment=gce_enrollment.Enrollment(name='name_value'), + enrollment_id='enrollment_id_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.create_enrollment(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/enrollments" % client.transport._host, args[1]) + + +def test_create_enrollment_rest_flattened_error(transport: str = 'rest'): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_enrollment( + eventarc.CreateEnrollmentRequest(), + parent='parent_value', + enrollment=gce_enrollment.Enrollment(name='name_value'), + enrollment_id='enrollment_id_value', + ) + + +def test_update_enrollment_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_enrollment in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.update_enrollment] = mock_rpc + + request = {} + client.update_enrollment(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_enrollment(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_update_enrollment_rest_required_fields(request_type=eventarc.UpdateEnrollmentRequest): + transport_class = transports.EventarcRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_enrollment._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_enrollment._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("allow_missing", "update_mask", "validate_only", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "patch", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.update_enrollment(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_update_enrollment_rest_unset_required_fields(): + transport = transports.EventarcRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.update_enrollment._get_unset_required_fields({}) + assert set(unset_fields) == (set(("allowMissing", "updateMask", "validateOnly", )) & set(("enrollment", ))) + + +def test_update_enrollment_rest_flattened(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # get arguments that satisfy an http rule for this method + sample_request = {'enrollment': {'name': 'projects/sample1/locations/sample2/enrollments/sample3'}} + + # get truthy value for each flattened field + mock_args = dict( + enrollment=gce_enrollment.Enrollment(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.update_enrollment(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{enrollment.name=projects/*/locations/*/enrollments/*}" % client.transport._host, args[1]) + + +def test_update_enrollment_rest_flattened_error(transport: str = 'rest'): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_enrollment( + eventarc.UpdateEnrollmentRequest(), + enrollment=gce_enrollment.Enrollment(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + +def test_delete_enrollment_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_enrollment in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.delete_enrollment] = mock_rpc + + request = {} + client.delete_enrollment(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_enrollment(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_delete_enrollment_rest_required_fields(request_type=eventarc.DeleteEnrollmentRequest): + transport_class = transports.EventarcRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_enrollment._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_enrollment._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("allow_missing", "etag", "validate_only", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.delete_enrollment(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_enrollment_rest_unset_required_fields(): + transport = transports.EventarcRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete_enrollment._get_unset_required_fields({}) + assert set(unset_fields) == (set(("allowMissing", "etag", "validateOnly", )) & set(("name", ))) + + +def test_delete_enrollment_rest_flattened(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'projects/sample1/locations/sample2/enrollments/sample3'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + etag='etag_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.delete_enrollment(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{name=projects/*/locations/*/enrollments/*}" % client.transport._host, args[1]) + + +def test_delete_enrollment_rest_flattened_error(transport: str = 'rest'): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_enrollment( + eventarc.DeleteEnrollmentRequest(), + name='name_value', + etag='etag_value', + ) + + +def test_get_pipeline_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_pipeline in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.get_pipeline] = mock_rpc + + request = {} + client.get_pipeline(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_pipeline(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_pipeline_rest_required_fields(request_type=eventarc.GetPipelineRequest): + transport_class = transports.EventarcRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_pipeline._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_pipeline._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = pipeline.Pipeline() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = pipeline.Pipeline.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_pipeline(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_pipeline_rest_unset_required_fields(): + transport = transports.EventarcRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get_pipeline._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", ))) + + +def test_get_pipeline_rest_flattened(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = pipeline.Pipeline() + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'projects/sample1/locations/sample2/pipelines/sample3'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = pipeline.Pipeline.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.get_pipeline(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{name=projects/*/locations/*/pipelines/*}" % client.transport._host, args[1]) + + +def test_get_pipeline_rest_flattened_error(transport: str = 'rest'): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_pipeline( + eventarc.GetPipelineRequest(), + name='name_value', + ) + + +def test_list_pipelines_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_pipelines in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.list_pipelines] = mock_rpc + + request = {} + client.list_pipelines(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_pipelines(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_pipelines_rest_required_fields(request_type=eventarc.ListPipelinesRequest): + transport_class = transports.EventarcRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_pipelines._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = 'parent_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_pipelines._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("filter", "order_by", "page_size", "page_token", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = eventarc.ListPipelinesResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = eventarc.ListPipelinesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.list_pipelines(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_list_pipelines_rest_unset_required_fields(): + transport = transports.EventarcRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.list_pipelines._get_unset_required_fields({}) + assert set(unset_fields) == (set(("filter", "orderBy", "pageSize", "pageToken", )) & set(("parent", ))) + + +def test_list_pipelines_rest_flattened(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = eventarc.ListPipelinesResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'parent': 'projects/sample1/locations/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = eventarc.ListPipelinesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.list_pipelines(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/pipelines" % client.transport._host, args[1]) + + +def test_list_pipelines_rest_flattened_error(transport: str = 'rest'): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_pipelines( + eventarc.ListPipelinesRequest(), + parent='parent_value', + ) + + +def test_list_pipelines_rest_pager(transport: str = 'rest'): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + eventarc.ListPipelinesResponse( + pipelines=[ + pipeline.Pipeline(), + pipeline.Pipeline(), + pipeline.Pipeline(), + ], + next_page_token='abc', + ), + eventarc.ListPipelinesResponse( + pipelines=[], + next_page_token='def', + ), + eventarc.ListPipelinesResponse( + pipelines=[ + pipeline.Pipeline(), + ], + next_page_token='ghi', + ), + eventarc.ListPipelinesResponse( + pipelines=[ + pipeline.Pipeline(), + pipeline.Pipeline(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(eventarc.ListPipelinesResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'parent': 'projects/sample1/locations/sample2'} + + pager = client.list_pipelines(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, pipeline.Pipeline) + for i in results) + + pages = list(client.list_pipelines(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +def test_create_pipeline_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_pipeline in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.create_pipeline] = mock_rpc + + request = {} + client.create_pipeline(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_pipeline(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_pipeline_rest_required_fields(request_type=eventarc.CreatePipelineRequest): + transport_class = transports.EventarcRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["pipeline_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + assert "pipelineId" not in jsonified_request + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_pipeline._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "pipelineId" in jsonified_request + assert jsonified_request["pipelineId"] == request_init["pipeline_id"] + + jsonified_request["parent"] = 'parent_value' + jsonified_request["pipelineId"] = 'pipeline_id_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_pipeline._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("pipeline_id", "validate_only", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + assert "pipelineId" in jsonified_request + assert jsonified_request["pipelineId"] == 'pipeline_id_value' + + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.create_pipeline(request) + + expected_params = [ + ( + "pipelineId", + "", + ), + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_create_pipeline_rest_unset_required_fields(): + transport = transports.EventarcRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.create_pipeline._get_unset_required_fields({}) + assert set(unset_fields) == (set(("pipelineId", "validateOnly", )) & set(("parent", "pipeline", "pipelineId", ))) + + +def test_create_pipeline_rest_flattened(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # get arguments that satisfy an http rule for this method + sample_request = {'parent': 'projects/sample1/locations/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + pipeline=gce_pipeline.Pipeline(name='name_value'), + pipeline_id='pipeline_id_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.create_pipeline(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/pipelines" % client.transport._host, args[1]) + + +def test_create_pipeline_rest_flattened_error(transport: str = 'rest'): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_pipeline( + eventarc.CreatePipelineRequest(), + parent='parent_value', + pipeline=gce_pipeline.Pipeline(name='name_value'), + pipeline_id='pipeline_id_value', + ) + + +def test_update_pipeline_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_pipeline in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.update_pipeline] = mock_rpc + + request = {} + client.update_pipeline(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_pipeline(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_update_pipeline_rest_required_fields(request_type=eventarc.UpdatePipelineRequest): + transport_class = transports.EventarcRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_pipeline._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_pipeline._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("allow_missing", "update_mask", "validate_only", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "patch", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.update_pipeline(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_update_pipeline_rest_unset_required_fields(): + transport = transports.EventarcRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.update_pipeline._get_unset_required_fields({}) + assert set(unset_fields) == (set(("allowMissing", "updateMask", "validateOnly", )) & set(("pipeline", ))) + + +def test_update_pipeline_rest_flattened(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # get arguments that satisfy an http rule for this method + sample_request = {'pipeline': {'name': 'projects/sample1/locations/sample2/pipelines/sample3'}} + + # get truthy value for each flattened field + mock_args = dict( + pipeline=gce_pipeline.Pipeline(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.update_pipeline(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{pipeline.name=projects/*/locations/*/pipelines/*}" % client.transport._host, args[1]) + + +def test_update_pipeline_rest_flattened_error(transport: str = 'rest'): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_pipeline( + eventarc.UpdatePipelineRequest(), + pipeline=gce_pipeline.Pipeline(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + +def test_delete_pipeline_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_pipeline in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.delete_pipeline] = mock_rpc + + request = {} + client.delete_pipeline(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_pipeline(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_delete_pipeline_rest_required_fields(request_type=eventarc.DeletePipelineRequest): + transport_class = transports.EventarcRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_pipeline._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_pipeline._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("allow_missing", "etag", "validate_only", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.delete_pipeline(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_pipeline_rest_unset_required_fields(): + transport = transports.EventarcRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete_pipeline._get_unset_required_fields({}) + assert set(unset_fields) == (set(("allowMissing", "etag", "validateOnly", )) & set(("name", ))) + + +def test_delete_pipeline_rest_flattened(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'projects/sample1/locations/sample2/pipelines/sample3'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + etag='etag_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.delete_pipeline(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{name=projects/*/locations/*/pipelines/*}" % client.transport._host, args[1]) + + +def test_delete_pipeline_rest_flattened_error(transport: str = 'rest'): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_pipeline( + eventarc.DeletePipelineRequest(), + name='name_value', + etag='etag_value', + ) + + +def test_get_google_api_source_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_google_api_source in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.get_google_api_source] = mock_rpc + + request = {} + client.get_google_api_source(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_google_api_source(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_google_api_source_rest_required_fields(request_type=eventarc.GetGoogleApiSourceRequest): + transport_class = transports.EventarcRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_google_api_source._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_google_api_source._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = google_api_source.GoogleApiSource() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = google_api_source.GoogleApiSource.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_google_api_source(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_google_api_source_rest_unset_required_fields(): + transport = transports.EventarcRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get_google_api_source._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", ))) + + +def test_get_google_api_source_rest_flattened(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = google_api_source.GoogleApiSource() + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'projects/sample1/locations/sample2/googleApiSources/sample3'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = google_api_source.GoogleApiSource.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.get_google_api_source(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{name=projects/*/locations/*/googleApiSources/*}" % client.transport._host, args[1]) + + +def test_get_google_api_source_rest_flattened_error(transport: str = 'rest'): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_google_api_source( + eventarc.GetGoogleApiSourceRequest(), + name='name_value', + ) + + +def test_list_google_api_sources_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_google_api_sources in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.list_google_api_sources] = mock_rpc + + request = {} + client.list_google_api_sources(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_google_api_sources(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_google_api_sources_rest_required_fields(request_type=eventarc.ListGoogleApiSourcesRequest): + transport_class = transports.EventarcRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_google_api_sources._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = 'parent_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_google_api_sources._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("filter", "order_by", "page_size", "page_token", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = eventarc.ListGoogleApiSourcesResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = eventarc.ListGoogleApiSourcesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.list_google_api_sources(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_list_google_api_sources_rest_unset_required_fields(): + transport = transports.EventarcRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.list_google_api_sources._get_unset_required_fields({}) + assert set(unset_fields) == (set(("filter", "orderBy", "pageSize", "pageToken", )) & set(("parent", ))) + + +def test_list_google_api_sources_rest_flattened(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = eventarc.ListGoogleApiSourcesResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'parent': 'projects/sample1/locations/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = eventarc.ListGoogleApiSourcesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.list_google_api_sources(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/googleApiSources" % client.transport._host, args[1]) + + +def test_list_google_api_sources_rest_flattened_error(transport: str = 'rest'): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_google_api_sources( + eventarc.ListGoogleApiSourcesRequest(), + parent='parent_value', + ) + + +def test_list_google_api_sources_rest_pager(transport: str = 'rest'): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + eventarc.ListGoogleApiSourcesResponse( + google_api_sources=[ + google_api_source.GoogleApiSource(), + google_api_source.GoogleApiSource(), + google_api_source.GoogleApiSource(), + ], + next_page_token='abc', + ), + eventarc.ListGoogleApiSourcesResponse( + google_api_sources=[], + next_page_token='def', + ), + eventarc.ListGoogleApiSourcesResponse( + google_api_sources=[ + google_api_source.GoogleApiSource(), + ], + next_page_token='ghi', + ), + eventarc.ListGoogleApiSourcesResponse( + google_api_sources=[ + google_api_source.GoogleApiSource(), + google_api_source.GoogleApiSource(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(eventarc.ListGoogleApiSourcesResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'parent': 'projects/sample1/locations/sample2'} + + pager = client.list_google_api_sources(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, google_api_source.GoogleApiSource) + for i in results) + + pages = list(client.list_google_api_sources(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +def test_create_google_api_source_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_google_api_source in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.create_google_api_source] = mock_rpc + + request = {} + client.create_google_api_source(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_google_api_source(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_google_api_source_rest_required_fields(request_type=eventarc.CreateGoogleApiSourceRequest): + transport_class = transports.EventarcRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["google_api_source_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + assert "googleApiSourceId" not in jsonified_request + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_google_api_source._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "googleApiSourceId" in jsonified_request + assert jsonified_request["googleApiSourceId"] == request_init["google_api_source_id"] + + jsonified_request["parent"] = 'parent_value' + jsonified_request["googleApiSourceId"] = 'google_api_source_id_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_google_api_source._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("google_api_source_id", "validate_only", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + assert "googleApiSourceId" in jsonified_request + assert jsonified_request["googleApiSourceId"] == 'google_api_source_id_value' + + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.create_google_api_source(request) + + expected_params = [ + ( + "googleApiSourceId", + "", + ), + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_create_google_api_source_rest_unset_required_fields(): + transport = transports.EventarcRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.create_google_api_source._get_unset_required_fields({}) + assert set(unset_fields) == (set(("googleApiSourceId", "validateOnly", )) & set(("parent", "googleApiSource", "googleApiSourceId", ))) + + +def test_create_google_api_source_rest_flattened(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # get arguments that satisfy an http rule for this method + sample_request = {'parent': 'projects/sample1/locations/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + google_api_source=gce_google_api_source.GoogleApiSource(name='name_value'), + google_api_source_id='google_api_source_id_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.create_google_api_source(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/googleApiSources" % client.transport._host, args[1]) + + +def test_create_google_api_source_rest_flattened_error(transport: str = 'rest'): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_google_api_source( + eventarc.CreateGoogleApiSourceRequest(), + parent='parent_value', + google_api_source=gce_google_api_source.GoogleApiSource(name='name_value'), + google_api_source_id='google_api_source_id_value', + ) + + +def test_update_google_api_source_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_google_api_source in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.update_google_api_source] = mock_rpc + + request = {} + client.update_google_api_source(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_google_api_source(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_update_google_api_source_rest_required_fields(request_type=eventarc.UpdateGoogleApiSourceRequest): + transport_class = transports.EventarcRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_google_api_source._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_google_api_source._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("allow_missing", "update_mask", "validate_only", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "patch", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.update_google_api_source(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_update_google_api_source_rest_unset_required_fields(): + transport = transports.EventarcRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.update_google_api_source._get_unset_required_fields({}) + assert set(unset_fields) == (set(("allowMissing", "updateMask", "validateOnly", )) & set(("googleApiSource", ))) + + +def test_update_google_api_source_rest_flattened(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # get arguments that satisfy an http rule for this method + sample_request = {'google_api_source': {'name': 'projects/sample1/locations/sample2/googleApiSources/sample3'}} + + # get truthy value for each flattened field + mock_args = dict( + google_api_source=gce_google_api_source.GoogleApiSource(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.update_google_api_source(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{google_api_source.name=projects/*/locations/*/googleApiSources/*}" % client.transport._host, args[1]) + + +def test_update_google_api_source_rest_flattened_error(transport: str = 'rest'): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_google_api_source( + eventarc.UpdateGoogleApiSourceRequest(), + google_api_source=gce_google_api_source.GoogleApiSource(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + +def test_delete_google_api_source_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_google_api_source in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.delete_google_api_source] = mock_rpc + + request = {} + client.delete_google_api_source(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_google_api_source(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_delete_google_api_source_rest_required_fields(request_type=eventarc.DeleteGoogleApiSourceRequest): + transport_class = transports.EventarcRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_google_api_source._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_google_api_source._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("allow_missing", "etag", "validate_only", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.delete_google_api_source(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_google_api_source_rest_unset_required_fields(): + transport = transports.EventarcRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete_google_api_source._get_unset_required_fields({}) + assert set(unset_fields) == (set(("allowMissing", "etag", "validateOnly", )) & set(("name", ))) + + +def test_delete_google_api_source_rest_flattened(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'projects/sample1/locations/sample2/googleApiSources/sample3'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + etag='etag_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.delete_google_api_source(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{name=projects/*/locations/*/googleApiSources/*}" % client.transport._host, args[1]) + + +def test_delete_google_api_source_rest_flattened_error(transport: str = 'rest'): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_google_api_source( + eventarc.DeleteGoogleApiSourceRequest(), + name='name_value', + etag='etag_value', + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.EventarcGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. 
+ transport = transports.EventarcGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = EventarcClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.EventarcGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = EventarcClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = EventarcClient( + client_options=options, + credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.EventarcGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = EventarcClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.EventarcGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = EventarcClient(transport=transport) + assert client.transport is transport + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.EventarcGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.EventarcGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + +@pytest.mark.parametrize("transport_class", [ + transports.EventarcGrpcTransport, + transports.EventarcGrpcAsyncIOTransport, + transports.EventarcRestTransport, +]) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + +def test_transport_kind_grpc(): + transport = EventarcClient.get_transport_class("grpc")( + credentials=ga_credentials.AnonymousCredentials() + ) + assert transport.kind == "grpc" + + +def test_initialize_client_w_grpc(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_trigger_empty_call_grpc(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_trigger), + '__call__') as call: + call.return_value = trigger.Trigger() + client.get_trigger(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = eventarc.GetTriggerRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+def test_list_triggers_empty_call_grpc(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_triggers), + '__call__') as call: + call.return_value = eventarc.ListTriggersResponse() + client.list_triggers(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = eventarc.ListTriggersRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_trigger_empty_call_grpc(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_trigger), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.create_trigger(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = eventarc.CreateTriggerRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_trigger_empty_call_grpc(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_trigger), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.update_trigger(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = eventarc.UpdateTriggerRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_trigger_empty_call_grpc(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_trigger), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.delete_trigger(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = eventarc.DeleteTriggerRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_channel_empty_call_grpc(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_channel), + '__call__') as call: + call.return_value = channel.Channel() + client.get_channel(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = eventarc.GetChannelRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_channels_empty_call_grpc(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_channels), + '__call__') as call: + call.return_value = eventarc.ListChannelsResponse() + client.list_channels(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = eventarc.ListChannelsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_channel_empty_call_grpc(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_channel_), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.create_channel(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = eventarc.CreateChannelRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_channel_empty_call_grpc(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_channel), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.update_channel(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = eventarc.UpdateChannelRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_channel_empty_call_grpc(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_channel), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.delete_channel(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = eventarc.DeleteChannelRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_provider_empty_call_grpc(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_provider), + '__call__') as call: + call.return_value = discovery.Provider() + client.get_provider(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = eventarc.GetProviderRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_providers_empty_call_grpc(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_providers), + '__call__') as call: + call.return_value = eventarc.ListProvidersResponse() + client.list_providers(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = eventarc.ListProvidersRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_channel_connection_empty_call_grpc(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_channel_connection), + '__call__') as call: + call.return_value = channel_connection.ChannelConnection() + client.get_channel_connection(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = eventarc.GetChannelConnectionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_channel_connections_empty_call_grpc(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_channel_connections), + '__call__') as call: + call.return_value = eventarc.ListChannelConnectionsResponse() + client.list_channel_connections(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = eventarc.ListChannelConnectionsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_channel_connection_empty_call_grpc(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_channel_connection), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.create_channel_connection(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = eventarc.CreateChannelConnectionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+def test_delete_channel_connection_empty_call_grpc(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_channel_connection), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.delete_channel_connection(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = eventarc.DeleteChannelConnectionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_google_channel_config_empty_call_grpc(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_google_channel_config), + '__call__') as call: + call.return_value = google_channel_config.GoogleChannelConfig() + client.get_google_channel_config(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = eventarc.GetGoogleChannelConfigRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_google_channel_config_empty_call_grpc(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_google_channel_config), + '__call__') as call: + call.return_value = gce_google_channel_config.GoogleChannelConfig() + client.update_google_channel_config(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = eventarc.UpdateGoogleChannelConfigRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_message_bus_empty_call_grpc(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_message_bus), + '__call__') as call: + call.return_value = message_bus.MessageBus() + client.get_message_bus(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = eventarc.GetMessageBusRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_message_buses_empty_call_grpc(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_message_buses), + '__call__') as call: + call.return_value = eventarc.ListMessageBusesResponse() + client.list_message_buses(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = eventarc.ListMessageBusesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_message_bus_enrollments_empty_call_grpc(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_message_bus_enrollments), + '__call__') as call: + call.return_value = eventarc.ListMessageBusEnrollmentsResponse() + client.list_message_bus_enrollments(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = eventarc.ListMessageBusEnrollmentsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_message_bus_empty_call_grpc(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_message_bus), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.create_message_bus(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = eventarc.CreateMessageBusRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_message_bus_empty_call_grpc(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_message_bus), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.update_message_bus(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = eventarc.UpdateMessageBusRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_message_bus_empty_call_grpc(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_message_bus), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.delete_message_bus(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = eventarc.DeleteMessageBusRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_enrollment_empty_call_grpc(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. 
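+ # Patching __call__ on the type of the transport's multicallable intercepts the
+ # stub invocation itself, so no RPC is ever issued.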
+ with mock.patch.object( + type(client.transport.get_enrollment), + '__call__') as call: + call.return_value = enrollment.Enrollment() + client.get_enrollment(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = eventarc.GetEnrollmentRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_enrollments_empty_call_grpc(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_enrollments), + '__call__') as call: + call.return_value = eventarc.ListEnrollmentsResponse() + client.list_enrollments(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = eventarc.ListEnrollmentsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_enrollment_empty_call_grpc(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_enrollment), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.create_enrollment(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = eventarc.CreateEnrollmentRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_enrollment_empty_call_grpc(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_enrollment), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.update_enrollment(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = eventarc.UpdateEnrollmentRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_enrollment_empty_call_grpc(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_enrollment), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.delete_enrollment(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = eventarc.DeleteEnrollmentRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+def test_get_pipeline_empty_call_grpc(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_pipeline), + '__call__') as call: + call.return_value = pipeline.Pipeline() + client.get_pipeline(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = eventarc.GetPipelineRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_pipelines_empty_call_grpc(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_pipelines), + '__call__') as call: + call.return_value = eventarc.ListPipelinesResponse() + client.list_pipelines(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = eventarc.ListPipelinesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_pipeline_empty_call_grpc(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_pipeline), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.create_pipeline(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = eventarc.CreatePipelineRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_pipeline_empty_call_grpc(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_pipeline), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.update_pipeline(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = eventarc.UpdatePipelineRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_pipeline_empty_call_grpc(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_pipeline), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.delete_pipeline(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = eventarc.DeletePipelineRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_google_api_source_empty_call_grpc(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_google_api_source), + '__call__') as call: + call.return_value = google_api_source.GoogleApiSource() + client.get_google_api_source(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = eventarc.GetGoogleApiSourceRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_google_api_sources_empty_call_grpc(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_google_api_sources), + '__call__') as call: + call.return_value = eventarc.ListGoogleApiSourcesResponse() + client.list_google_api_sources(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = eventarc.ListGoogleApiSourcesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_google_api_source_empty_call_grpc(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_google_api_source), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.create_google_api_source(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = eventarc.CreateGoogleApiSourceRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_google_api_source_empty_call_grpc(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_google_api_source), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.update_google_api_source(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = eventarc.UpdateGoogleApiSourceRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_google_api_source_empty_call_grpc(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_google_api_source), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.delete_google_api_source(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = eventarc.DeleteGoogleApiSourceRequest() + + assert args[0] == request_msg + + +def test_transport_kind_grpc_asyncio(): + transport = EventarcAsyncClient.get_transport_class("grpc_asyncio")( + credentials=async_anonymous_credentials() + ) + assert transport.kind == "grpc_asyncio" + + +def test_initialize_client_w_grpc_asyncio(): + client = EventarcAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_trigger_empty_call_grpc_asyncio(): + client = EventarcAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_trigger), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(trigger.Trigger( + name='name_value', + uid='uid_value', + service_account='service_account_value', + channel='channel_value', + event_data_content_type='event_data_content_type_value', + satisfies_pzs=True, + etag='etag_value', + )) + await client.get_trigger(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = eventarc.GetTriggerRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_triggers_empty_call_grpc_asyncio(): + client = EventarcAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_triggers), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(eventarc.ListTriggersResponse( + next_page_token='next_page_token_value', + unreachable=['unreachable_value'], + )) + await client.list_triggers(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = eventarc.ListTriggersRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_create_trigger_empty_call_grpc_asyncio(): + client = EventarcAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_trigger), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + await client.create_trigger(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = eventarc.CreateTriggerRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_update_trigger_empty_call_grpc_asyncio(): + client = EventarcAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_trigger), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + await client.update_trigger(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = eventarc.UpdateTriggerRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_delete_trigger_empty_call_grpc_asyncio(): + client = EventarcAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_trigger), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + await client.delete_trigger(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = eventarc.DeleteTriggerRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_channel_empty_call_grpc_asyncio(): + client = EventarcAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_channel), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(channel.Channel( + name='name_value', + uid='uid_value', + provider='provider_value', + state=channel.Channel.State.PENDING, + activation_token='activation_token_value', + crypto_key_name='crypto_key_name_value', + satisfies_pzs=True, + )) + await client.get_channel(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = eventarc.GetChannelRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_channels_empty_call_grpc_asyncio(): + client = EventarcAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_channels), + '__call__') as call: + # Designate an appropriate return value for the call. 
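+ # FakeUnaryUnaryCall makes the canned response awaitable, mimicking the
+ # grpc.aio call object the async client normally awaits.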
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(eventarc.ListChannelsResponse( + next_page_token='next_page_token_value', + unreachable=['unreachable_value'], + )) + await client.list_channels(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = eventarc.ListChannelsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_create_channel_empty_call_grpc_asyncio(): + client = EventarcAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_channel_), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + await client.create_channel(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = eventarc.CreateChannelRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_update_channel_empty_call_grpc_asyncio(): + client = EventarcAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_channel), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + await client.update_channel(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = eventarc.UpdateChannelRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_delete_channel_empty_call_grpc_asyncio(): + client = EventarcAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_channel), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + await client.delete_channel(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = eventarc.DeleteChannelRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_provider_empty_call_grpc_asyncio(): + client = EventarcAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_provider), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(discovery.Provider( + name='name_value', + display_name='display_name_value', + )) + await client.get_provider(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = eventarc.GetProviderRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_providers_empty_call_grpc_asyncio(): + client = EventarcAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_providers), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(eventarc.ListProvidersResponse( + next_page_token='next_page_token_value', + unreachable=['unreachable_value'], + )) + await client.list_providers(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = eventarc.ListProvidersRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_channel_connection_empty_call_grpc_asyncio(): + client = EventarcAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_channel_connection), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(channel_connection.ChannelConnection( + name='name_value', + uid='uid_value', + channel='channel_value', + activation_token='activation_token_value', + )) + await client.get_channel_connection(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = eventarc.GetChannelConnectionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_channel_connections_empty_call_grpc_asyncio(): + client = EventarcAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_channel_connections), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(eventarc.ListChannelConnectionsResponse( + next_page_token='next_page_token_value', + unreachable=['unreachable_value'], + )) + await client.list_channel_connections(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = eventarc.ListChannelConnectionsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_create_channel_connection_empty_call_grpc_asyncio(): + client = EventarcAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_channel_connection), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + await client.create_channel_connection(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = eventarc.CreateChannelConnectionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_delete_channel_connection_empty_call_grpc_asyncio(): + client = EventarcAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_channel_connection), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + await client.delete_channel_connection(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = eventarc.DeleteChannelConnectionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_google_channel_config_empty_call_grpc_asyncio(): + client = EventarcAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_google_channel_config), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(google_channel_config.GoogleChannelConfig( + name='name_value', + crypto_key_name='crypto_key_name_value', + )) + await client.get_google_channel_config(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = eventarc.GetGoogleChannelConfigRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_update_google_channel_config_empty_call_grpc_asyncio(): + client = EventarcAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_google_channel_config), + '__call__') as call: + # Designate an appropriate return value for the call. 
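+ # UpdateGoogleChannelConfig returns the mutated config itself (referenced via the
+ # gce_google_channel_config alias) rather than a long-running operation.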
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gce_google_channel_config.GoogleChannelConfig( + name='name_value', + crypto_key_name='crypto_key_name_value', + )) + await client.update_google_channel_config(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = eventarc.UpdateGoogleChannelConfigRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_message_bus_empty_call_grpc_asyncio(): + client = EventarcAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_message_bus), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(message_bus.MessageBus( + name='name_value', + uid='uid_value', + etag='etag_value', + display_name='display_name_value', + crypto_key_name='crypto_key_name_value', + )) + await client.get_message_bus(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = eventarc.GetMessageBusRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_message_buses_empty_call_grpc_asyncio(): + client = EventarcAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_message_buses), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(eventarc.ListMessageBusesResponse( + next_page_token='next_page_token_value', + unreachable=['unreachable_value'], + )) + await client.list_message_buses(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = eventarc.ListMessageBusesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_message_bus_enrollments_empty_call_grpc_asyncio(): + client = EventarcAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_message_bus_enrollments), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(eventarc.ListMessageBusEnrollmentsResponse( + enrollments=['enrollments_value'], + next_page_token='next_page_token_value', + unreachable=['unreachable_value'], + )) + await client.list_message_bus_enrollments(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = eventarc.ListMessageBusEnrollmentsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. 
request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_create_message_bus_empty_call_grpc_asyncio(): + client = EventarcAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_message_bus), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + await client.create_message_bus(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = eventarc.CreateMessageBusRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_update_message_bus_empty_call_grpc_asyncio(): + client = EventarcAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_message_bus), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + await client.update_message_bus(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = eventarc.UpdateMessageBusRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_delete_message_bus_empty_call_grpc_asyncio(): + client = EventarcAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_message_bus), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + await client.delete_message_bus(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = eventarc.DeleteMessageBusRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_enrollment_empty_call_grpc_asyncio(): + client = EventarcAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_enrollment), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(enrollment.Enrollment( + name='name_value', + uid='uid_value', + etag='etag_value', + display_name='display_name_value', + cel_match='cel_match_value', + message_bus='message_bus_value', + destination='destination_value', + )) + await client.get_enrollment(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = eventarc.GetEnrollmentRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_enrollments_empty_call_grpc_asyncio(): + client = EventarcAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_enrollments), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(eventarc.ListEnrollmentsResponse( + next_page_token='next_page_token_value', + unreachable=['unreachable_value'], + )) + await client.list_enrollments(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = eventarc.ListEnrollmentsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_create_enrollment_empty_call_grpc_asyncio(): + client = EventarcAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_enrollment), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + await client.create_enrollment(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = eventarc.CreateEnrollmentRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_update_enrollment_empty_call_grpc_asyncio(): + client = EventarcAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_enrollment), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + await client.update_enrollment(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = eventarc.UpdateEnrollmentRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_delete_enrollment_empty_call_grpc_asyncio(): + client = EventarcAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_enrollment), + '__call__') as call: + # Designate an appropriate return value for the call. 
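+ # LRO-backed methods resolve to a raw operations_pb2.Operation at the stub level;
+ # only the outgoing request is asserted here.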
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + await client.delete_enrollment(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = eventarc.DeleteEnrollmentRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_pipeline_empty_call_grpc_asyncio(): + client = EventarcAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_pipeline), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(pipeline.Pipeline( + name='name_value', + uid='uid_value', + display_name='display_name_value', + crypto_key_name='crypto_key_name_value', + etag='etag_value', + satisfies_pzs=True, + )) + await client.get_pipeline(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = eventarc.GetPipelineRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_pipelines_empty_call_grpc_asyncio(): + client = EventarcAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_pipelines), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(eventarc.ListPipelinesResponse( + next_page_token='next_page_token_value', + unreachable=['unreachable_value'], + )) + await client.list_pipelines(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = eventarc.ListPipelinesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_create_pipeline_empty_call_grpc_asyncio(): + client = EventarcAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_pipeline), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + await client.create_pipeline(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = eventarc.CreatePipelineRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+@pytest.mark.asyncio +async def test_update_pipeline_empty_call_grpc_asyncio(): + client = EventarcAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_pipeline), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + await client.update_pipeline(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = eventarc.UpdatePipelineRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_delete_pipeline_empty_call_grpc_asyncio(): + client = EventarcAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_pipeline), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + await client.delete_pipeline(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = eventarc.DeletePipelineRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_google_api_source_empty_call_grpc_asyncio(): + client = EventarcAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_google_api_source), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(google_api_source.GoogleApiSource( + name='name_value', + uid='uid_value', + etag='etag_value', + display_name='display_name_value', + destination='destination_value', + crypto_key_name='crypto_key_name_value', + )) + await client.get_google_api_source(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = eventarc.GetGoogleApiSourceRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_google_api_sources_empty_call_grpc_asyncio(): + client = EventarcAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_google_api_sources), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(eventarc.ListGoogleApiSourcesResponse( + next_page_token='next_page_token_value', + unreachable=['unreachable_value'], + )) + await client.list_google_api_sources(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = eventarc.ListGoogleApiSourcesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_create_google_api_source_empty_call_grpc_asyncio(): + client = EventarcAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_google_api_source), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + await client.create_google_api_source(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = eventarc.CreateGoogleApiSourceRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_update_google_api_source_empty_call_grpc_asyncio(): + client = EventarcAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_google_api_source), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + await client.update_google_api_source(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = eventarc.UpdateGoogleApiSourceRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_delete_google_api_source_empty_call_grpc_asyncio(): + client = EventarcAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_google_api_source), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + await client.delete_google_api_source(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = eventarc.DeleteGoogleApiSourceRequest() + + assert args[0] == request_msg + + +def test_transport_kind_rest(): + transport = EventarcClient.get_transport_class("rest")( + credentials=ga_credentials.AnonymousCredentials() + ) + assert transport.kind == "rest" + + +def test_get_trigger_rest_bad_request(request_type=eventarc.GetTriggerRequest): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/triggers/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
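+ # Returning a 400 from the patched session drives the transport's error-mapping
+ # path, so core_exceptions.BadRequest surfaces without any real HTTP traffic.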
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_trigger(request) + + +@pytest.mark.parametrize("request_type", [ + eventarc.GetTriggerRequest, + dict, +]) +def test_get_trigger_rest_call_success(request_type): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/triggers/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = trigger.Trigger( + name='name_value', + uid='uid_value', + service_account='service_account_value', + channel='channel_value', + event_data_content_type='event_data_content_type_value', + satisfies_pzs=True, + etag='etag_value', + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = trigger.Trigger.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.get_trigger(request) + + # Establish that the response is the type that we expect. 
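+ # Every field set on return_value should survive the protobuf -> JSON -> protobuf
+ # round trip performed by the REST transport.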
+ assert isinstance(response, trigger.Trigger) + assert response.name == 'name_value' + assert response.uid == 'uid_value' + assert response.service_account == 'service_account_value' + assert response.channel == 'channel_value' + assert response.event_data_content_type == 'event_data_content_type_value' + assert response.satisfies_pzs is True + assert response.etag == 'etag_value' + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_trigger_rest_interceptors(null_interceptor): + transport = transports.EventarcRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.EventarcRestInterceptor(), + ) + client = EventarcClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.EventarcRestInterceptor, "post_get_trigger") as post, \ + mock.patch.object(transports.EventarcRestInterceptor, "post_get_trigger_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.EventarcRestInterceptor, "pre_get_trigger") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = eventarc.GetTriggerRequest.pb(eventarc.GetTriggerRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = trigger.Trigger.to_json(trigger.Trigger()) + req.return_value.content = return_value + + request = eventarc.GetTriggerRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = trigger.Trigger() + post_with_metadata.return_value = trigger.Trigger(), metadata + + client.get_trigger(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_list_triggers_rest_bad_request(request_type=eventarc.ListTriggersRequest): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_triggers(request) + + +@pytest.mark.parametrize("request_type", [ + eventarc.ListTriggersRequest, + dict, +]) +def test_list_triggers_rest_call_success(request_type): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
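+ # The transport's own HTTP session is patched, so the canned ListTriggersResponse
+ # below is exactly what the returned pager consumes.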
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = eventarc.ListTriggersResponse( + next_page_token='next_page_token_value', + unreachable=['unreachable_value'], + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = eventarc.ListTriggersResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.list_triggers(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListTriggersPager) + assert response.next_page_token == 'next_page_token_value' + assert response.unreachable == ['unreachable_value'] + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_triggers_rest_interceptors(null_interceptor): + transport = transports.EventarcRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.EventarcRestInterceptor(), + ) + client = EventarcClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.EventarcRestInterceptor, "post_list_triggers") as post, \ + mock.patch.object(transports.EventarcRestInterceptor, "post_list_triggers_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.EventarcRestInterceptor, "pre_list_triggers") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = eventarc.ListTriggersRequest.pb(eventarc.ListTriggersRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = eventarc.ListTriggersResponse.to_json(eventarc.ListTriggersResponse()) + req.return_value.content = return_value + + request = eventarc.ListTriggersRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = eventarc.ListTriggersResponse() + post_with_metadata.return_value = eventarc.ListTriggersResponse(), metadata + + client.list_triggers(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_create_trigger_rest_bad_request(request_type=eventarc.CreateTriggerRequest): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
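+ # A 400 status on the mocked requests.Session is surfaced by the REST transport
+ # as google.api_core.exceptions.BadRequest, which is what pytest.raises captures
+ # here.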
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.create_trigger(request) + + +@pytest.mark.parametrize("request_type", [ + eventarc.CreateTriggerRequest, + dict, +]) +def test_create_trigger_rest_call_success(request_type): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2'} + request_init["trigger"] = {'name': 'name_value', 'uid': 'uid_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'event_filters': [{'attribute': 'attribute_value', 'value': 'value_value', 'operator': 'operator_value'}], 'service_account': 'service_account_value', 'destination': {'cloud_run': {'service': 'service_value', 'path': 'path_value', 'region': 'region_value'}, 'cloud_function': 'cloud_function_value', 'gke': {'cluster': 'cluster_value', 'location': 'location_value', 'namespace': 'namespace_value', 'service': 'service_value', 'path': 'path_value'}, 'workflow': 'workflow_value', 'http_endpoint': {'uri': 'uri_value'}, 'network_config': {'network_attachment': 'network_attachment_value'}}, 'transport': {'pubsub': {'topic': 'topic_value', 'subscription': 'subscription_value'}}, 'labels': {}, 'channel': 'channel_value', 'conditions': {}, 'event_data_content_type': 'event_data_content_type_value', 'satisfies_pzs': True, 'etag': 'etag_value'} + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = eventarc.CreateTriggerRequest.meta.fields["trigger"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
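+ # Roughly: a proto-plus message type (no DESCRIPTOR attribute) exposes its fields
+ # via message.meta.fields, while a vanilla protobuf (*_pb2) message exposes them
+ # via message.DESCRIPTOR.fields; e.g. the "trigger" field of CreateTriggerRequest
+ # should yield descriptors for name, uid, event_filters, and so on.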
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["trigger"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + {"field": field, "subfield": subfield, "is_repeated": is_repeated} + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["trigger"][field])): + del request_init["trigger"][field][i][subfield] + else: + del request_init["trigger"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.create_trigger(request) + + # Establish that the response is the type that we expect. 
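+ # create_trigger is a long-running operation, so the raw HTTP payload mocked above
+ # is an Operation proto rather than a Trigger; assuming the usual
+ # google.api_core.operation.Operation wrapper, a stricter check could be
+ # `assert response.operation.name == 'operations/spam'`.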
+ json_return_value = json_format.MessageToJson(return_value) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_trigger_rest_interceptors(null_interceptor): + transport = transports.EventarcRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.EventarcRestInterceptor(), + ) + client = EventarcClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(operation.Operation, "_set_result_from_operation"), \ + mock.patch.object(transports.EventarcRestInterceptor, "post_create_trigger") as post, \ + mock.patch.object(transports.EventarcRestInterceptor, "post_create_trigger_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.EventarcRestInterceptor, "pre_create_trigger") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = eventarc.CreateTriggerRequest.pb(eventarc.CreateTriggerRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.content = return_value + + request = eventarc.CreateTriggerRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata + + client.create_trigger(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_update_trigger_rest_bad_request(request_type=eventarc.UpdateTriggerRequest): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'trigger': {'name': 'projects/sample1/locations/sample2/triggers/sample3'}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.update_trigger(request) + + +@pytest.mark.parametrize("request_type", [ + eventarc.UpdateTriggerRequest, + dict, +]) +def test_update_trigger_rest_call_success(request_type): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'trigger': {'name': 'projects/sample1/locations/sample2/triggers/sample3'}} + request_init["trigger"] = {'name': 'projects/sample1/locations/sample2/triggers/sample3', 'uid': 'uid_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'event_filters': [{'attribute': 'attribute_value', 'value': 'value_value', 'operator': 'operator_value'}], 'service_account': 'service_account_value', 'destination': {'cloud_run': {'service': 'service_value', 'path': 'path_value', 'region': 'region_value'}, 'cloud_function': 'cloud_function_value', 'gke': {'cluster': 'cluster_value', 'location': 'location_value', 'namespace': 'namespace_value', 'service': 'service_value', 'path': 'path_value'}, 'workflow': 'workflow_value', 'http_endpoint': {'uri': 'uri_value'}, 'network_config': {'network_attachment': 'network_attachment_value'}}, 'transport': {'pubsub': {'topic': 'topic_value', 'subscription': 'subscription_value'}}, 'labels': {}, 'channel': 'channel_value', 'conditions': {}, 'event_data_content_type': 'event_data_content_type_value', 'satisfies_pzs': True, 'etag': 'etag_value'} + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = eventarc.UpdateTriggerRequest.meta.fields["trigger"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["trigger"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + {"field": field, "subfield": subfield, "is_repeated": is_repeated} + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["trigger"][field])): + del request_init["trigger"][field][i][subfield] + else: + del request_init["trigger"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.update_trigger(request) + + # Establish that the response is the type that we expect. 
+ json_return_value = json_format.MessageToJson(return_value) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_trigger_rest_interceptors(null_interceptor): + transport = transports.EventarcRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.EventarcRestInterceptor(), + ) + client = EventarcClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(operation.Operation, "_set_result_from_operation"), \ + mock.patch.object(transports.EventarcRestInterceptor, "post_update_trigger") as post, \ + mock.patch.object(transports.EventarcRestInterceptor, "post_update_trigger_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.EventarcRestInterceptor, "pre_update_trigger") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = eventarc.UpdateTriggerRequest.pb(eventarc.UpdateTriggerRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.content = return_value + + request = eventarc.UpdateTriggerRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata + + client.update_trigger(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_delete_trigger_rest_bad_request(request_type=eventarc.DeleteTriggerRequest): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/triggers/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.delete_trigger(request) + + +@pytest.mark.parametrize("request_type", [ + eventarc.DeleteTriggerRequest, + dict, +]) +def test_delete_trigger_rest_call_success(request_type): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/triggers/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name='operations/spam') + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.delete_trigger(request) + + # Establish that the response is the type that we expect. + json_return_value = json_format.MessageToJson(return_value) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_trigger_rest_interceptors(null_interceptor): + transport = transports.EventarcRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.EventarcRestInterceptor(), + ) + client = EventarcClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(operation.Operation, "_set_result_from_operation"), \ + mock.patch.object(transports.EventarcRestInterceptor, "post_delete_trigger") as post, \ + mock.patch.object(transports.EventarcRestInterceptor, "post_delete_trigger_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.EventarcRestInterceptor, "pre_delete_trigger") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = eventarc.DeleteTriggerRequest.pb(eventarc.DeleteTriggerRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.content = return_value + + request = eventarc.DeleteTriggerRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata + + client.delete_trigger(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_get_channel_rest_bad_request(request_type=eventarc.GetChannelRequest): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/channels/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_channel(request) + + +@pytest.mark.parametrize("request_type", [ + eventarc.GetChannelRequest, + dict, +]) +def test_get_channel_rest_call_success(request_type): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/channels/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = channel.Channel( + name='name_value', + uid='uid_value', + provider='provider_value', + state=channel.Channel.State.PENDING, + activation_token='activation_token_value', + crypto_key_name='crypto_key_name_value', + satisfies_pzs=True, + pubsub_topic='pubsub_topic_value', + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = channel.Channel.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.get_channel(request) + + # Establish that the response is the type that we expect. 
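+ # pubsub_topic is populated on the mocked Channel but is not asserted below,
+ # presumably because it sits in the Channel.transport oneof and the generated
+ # assertions only cover plain scalar fields.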
+ assert isinstance(response, channel.Channel) + assert response.name == 'name_value' + assert response.uid == 'uid_value' + assert response.provider == 'provider_value' + assert response.state == channel.Channel.State.PENDING + assert response.activation_token == 'activation_token_value' + assert response.crypto_key_name == 'crypto_key_name_value' + assert response.satisfies_pzs is True + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_channel_rest_interceptors(null_interceptor): + transport = transports.EventarcRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.EventarcRestInterceptor(), + ) + client = EventarcClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.EventarcRestInterceptor, "post_get_channel") as post, \ + mock.patch.object(transports.EventarcRestInterceptor, "post_get_channel_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.EventarcRestInterceptor, "pre_get_channel") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = eventarc.GetChannelRequest.pb(eventarc.GetChannelRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = channel.Channel.to_json(channel.Channel()) + req.return_value.content = return_value + + request = eventarc.GetChannelRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = channel.Channel() + post_with_metadata.return_value = channel.Channel(), metadata + + client.get_channel(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_list_channels_rest_bad_request(request_type=eventarc.ListChannelsRequest): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_channels(request) + + +@pytest.mark.parametrize("request_type", [ + eventarc.ListChannelsRequest, + dict, +]) +def test_list_channels_rest_call_success(request_type): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = eventarc.ListChannelsResponse( + next_page_token='next_page_token_value', + unreachable=['unreachable_value'], + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = eventarc.ListChannelsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.list_channels(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListChannelsPager) + assert response.next_page_token == 'next_page_token_value' + assert response.unreachable == ['unreachable_value'] + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_channels_rest_interceptors(null_interceptor): + transport = transports.EventarcRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.EventarcRestInterceptor(), + ) + client = EventarcClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.EventarcRestInterceptor, "post_list_channels") as post, \ + mock.patch.object(transports.EventarcRestInterceptor, "post_list_channels_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.EventarcRestInterceptor, "pre_list_channels") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = eventarc.ListChannelsRequest.pb(eventarc.ListChannelsRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = eventarc.ListChannelsResponse.to_json(eventarc.ListChannelsResponse()) + req.return_value.content = return_value + + request = eventarc.ListChannelsRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = eventarc.ListChannelsResponse() + post_with_metadata.return_value = eventarc.ListChannelsResponse(), metadata + + client.list_channels(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_create_channel_rest_bad_request(request_type=eventarc.CreateChannelRequest): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.create_channel(request) + + +@pytest.mark.parametrize("request_type", [ + eventarc.CreateChannelRequest, + dict, +]) +def test_create_channel_rest_call_success(request_type): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2'} + request_init["channel"] = {'name': 'name_value', 'uid': 'uid_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'provider': 'provider_value', 'pubsub_topic': 'pubsub_topic_value', 'state': 1, 'activation_token': 'activation_token_value', 'crypto_key_name': 'crypto_key_name_value', 'satisfies_pzs': True, 'labels': {}} + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = eventarc.CreateChannelRequest.meta.fields["channel"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["channel"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + {"field": field, "subfield": subfield, "is_repeated": is_repeated} + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["channel"][field])): + del request_init["channel"][field][i][subfield] + else: + del request_init["channel"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.create_channel(request) + + # Establish that the response is the type that we expect. 
+ json_return_value = json_format.MessageToJson(return_value) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_channel_rest_interceptors(null_interceptor): + transport = transports.EventarcRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.EventarcRestInterceptor(), + ) + client = EventarcClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(operation.Operation, "_set_result_from_operation"), \ + mock.patch.object(transports.EventarcRestInterceptor, "post_create_channel") as post, \ + mock.patch.object(transports.EventarcRestInterceptor, "post_create_channel_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.EventarcRestInterceptor, "pre_create_channel") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = eventarc.CreateChannelRequest.pb(eventarc.CreateChannelRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.content = return_value + + request = eventarc.CreateChannelRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata + + client.create_channel(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_update_channel_rest_bad_request(request_type=eventarc.UpdateChannelRequest): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'channel': {'name': 'projects/sample1/locations/sample2/channels/sample3'}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.update_channel(request) + + +@pytest.mark.parametrize("request_type", [ + eventarc.UpdateChannelRequest, + dict, +]) +def test_update_channel_rest_call_success(request_type): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'channel': {'name': 'projects/sample1/locations/sample2/channels/sample3'}} + request_init["channel"] = {'name': 'projects/sample1/locations/sample2/channels/sample3', 'uid': 'uid_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'provider': 'provider_value', 'pubsub_topic': 'pubsub_topic_value', 'state': 1, 'activation_token': 'activation_token_value', 'crypto_key_name': 'crypto_key_name_value', 'satisfies_pzs': True, 'labels': {}} + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = eventarc.UpdateChannelRequest.meta.fields["channel"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["channel"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + {"field": field, "subfield": subfield, "is_repeated": is_repeated} + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["channel"][field])): + del request_init["channel"][field][i][subfield] + else: + del request_init["channel"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.update_channel(request) + + # Establish that the response is the type that we expect. 
+ json_return_value = json_format.MessageToJson(return_value) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_channel_rest_interceptors(null_interceptor): + transport = transports.EventarcRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.EventarcRestInterceptor(), + ) + client = EventarcClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(operation.Operation, "_set_result_from_operation"), \ + mock.patch.object(transports.EventarcRestInterceptor, "post_update_channel") as post, \ + mock.patch.object(transports.EventarcRestInterceptor, "post_update_channel_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.EventarcRestInterceptor, "pre_update_channel") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = eventarc.UpdateChannelRequest.pb(eventarc.UpdateChannelRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.content = return_value + + request = eventarc.UpdateChannelRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata + + client.update_channel(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_delete_channel_rest_bad_request(request_type=eventarc.DeleteChannelRequest): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/channels/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.delete_channel(request) + + +@pytest.mark.parametrize("request_type", [ + eventarc.DeleteChannelRequest, + dict, +]) +def test_delete_channel_rest_call_success(request_type): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/channels/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name='operations/spam') + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.delete_channel(request) + + # Establish that the response is the type that we expect. + json_return_value = json_format.MessageToJson(return_value) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_channel_rest_interceptors(null_interceptor): + transport = transports.EventarcRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.EventarcRestInterceptor(), + ) + client = EventarcClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(operation.Operation, "_set_result_from_operation"), \ + mock.patch.object(transports.EventarcRestInterceptor, "post_delete_channel") as post, \ + mock.patch.object(transports.EventarcRestInterceptor, "post_delete_channel_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.EventarcRestInterceptor, "pre_delete_channel") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = eventarc.DeleteChannelRequest.pb(eventarc.DeleteChannelRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.content = return_value + + request = eventarc.DeleteChannelRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata + + client.delete_channel(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_get_provider_rest_bad_request(request_type=eventarc.GetProviderRequest): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/providers/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_provider(request) + + +@pytest.mark.parametrize("request_type", [ + eventarc.GetProviderRequest, + dict, +]) +def test_get_provider_rest_call_success(request_type): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/providers/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = discovery.Provider( + name='name_value', + display_name='display_name_value', + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = discovery.Provider.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.get_provider(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, discovery.Provider) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_provider_rest_interceptors(null_interceptor): + transport = transports.EventarcRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.EventarcRestInterceptor(), + ) + client = EventarcClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.EventarcRestInterceptor, "post_get_provider") as post, \ + mock.patch.object(transports.EventarcRestInterceptor, "post_get_provider_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.EventarcRestInterceptor, "pre_get_provider") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = eventarc.GetProviderRequest.pb(eventarc.GetProviderRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = discovery.Provider.to_json(discovery.Provider()) + req.return_value.content = return_value + + request = eventarc.GetProviderRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = discovery.Provider() + post_with_metadata.return_value = discovery.Provider(), metadata + + client.get_provider(request, metadata=[("key", "val"), 
("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_list_providers_rest_bad_request(request_type=eventarc.ListProvidersRequest): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_providers(request) + + +@pytest.mark.parametrize("request_type", [ + eventarc.ListProvidersRequest, + dict, +]) +def test_list_providers_rest_call_success(request_type): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = eventarc.ListProvidersResponse( + next_page_token='next_page_token_value', + unreachable=['unreachable_value'], + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = eventarc.ListProvidersResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.list_providers(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListProvidersPager) + assert response.next_page_token == 'next_page_token_value' + assert response.unreachable == ['unreachable_value'] + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_providers_rest_interceptors(null_interceptor): + transport = transports.EventarcRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.EventarcRestInterceptor(), + ) + client = EventarcClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.EventarcRestInterceptor, "post_list_providers") as post, \ + mock.patch.object(transports.EventarcRestInterceptor, "post_list_providers_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.EventarcRestInterceptor, "pre_list_providers") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = eventarc.ListProvidersRequest.pb(eventarc.ListProvidersRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = eventarc.ListProvidersResponse.to_json(eventarc.ListProvidersResponse()) + req.return_value.content = return_value + + request = eventarc.ListProvidersRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = eventarc.ListProvidersResponse() + post_with_metadata.return_value = eventarc.ListProvidersResponse(), metadata + + client.list_providers(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_get_channel_connection_rest_bad_request(request_type=eventarc.GetChannelConnectionRequest): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/channelConnections/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_channel_connection(request) + + +@pytest.mark.parametrize("request_type", [ + eventarc.GetChannelConnectionRequest, + dict, +]) +def test_get_channel_connection_rest_call_success(request_type): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/channelConnections/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = channel_connection.ChannelConnection( + name='name_value', + uid='uid_value', + channel='channel_value', + activation_token='activation_token_value', + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = channel_connection.ChannelConnection.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.get_channel_connection(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, channel_connection.ChannelConnection) + assert response.name == 'name_value' + assert response.uid == 'uid_value' + assert response.channel == 'channel_value' + assert response.activation_token == 'activation_token_value' + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_channel_connection_rest_interceptors(null_interceptor): + transport = transports.EventarcRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.EventarcRestInterceptor(), + ) + client = EventarcClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.EventarcRestInterceptor, "post_get_channel_connection") as post, \ + mock.patch.object(transports.EventarcRestInterceptor, "post_get_channel_connection_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.EventarcRestInterceptor, "pre_get_channel_connection") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = eventarc.GetChannelConnectionRequest.pb(eventarc.GetChannelConnectionRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = channel_connection.ChannelConnection.to_json(channel_connection.ChannelConnection()) + req.return_value.content = return_value + + request = eventarc.GetChannelConnectionRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = channel_connection.ChannelConnection() + post_with_metadata.return_value = channel_connection.ChannelConnection(), metadata + + client.get_channel_connection(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_list_channel_connections_rest_bad_request(request_type=eventarc.ListChannelConnectionsRequest): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_channel_connections(request) + + +@pytest.mark.parametrize("request_type", [ + eventarc.ListChannelConnectionsRequest, + dict, +]) +def test_list_channel_connections_rest_call_success(request_type): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = eventarc.ListChannelConnectionsResponse( + next_page_token='next_page_token_value', + unreachable=['unreachable_value'], + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = eventarc.ListChannelConnectionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.list_channel_connections(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListChannelConnectionsPager) + assert response.next_page_token == 'next_page_token_value' + assert response.unreachable == ['unreachable_value'] + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_channel_connections_rest_interceptors(null_interceptor): + transport = transports.EventarcRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.EventarcRestInterceptor(), + ) + client = EventarcClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.EventarcRestInterceptor, "post_list_channel_connections") as post, \ + mock.patch.object(transports.EventarcRestInterceptor, "post_list_channel_connections_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.EventarcRestInterceptor, "pre_list_channel_connections") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = eventarc.ListChannelConnectionsRequest.pb(eventarc.ListChannelConnectionsRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = eventarc.ListChannelConnectionsResponse.to_json(eventarc.ListChannelConnectionsResponse()) + req.return_value.content = return_value + + request = eventarc.ListChannelConnectionsRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = eventarc.ListChannelConnectionsResponse() + post_with_metadata.return_value = eventarc.ListChannelConnectionsResponse(), metadata + + client.list_channel_connections(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_create_channel_connection_rest_bad_request(request_type=eventarc.CreateChannelConnectionRequest): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.create_channel_connection(request) + + +@pytest.mark.parametrize("request_type", [ + eventarc.CreateChannelConnectionRequest, + dict, +]) +def test_create_channel_connection_rest_call_success(request_type): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2'} + request_init["channel_connection"] = {'name': 'name_value', 'uid': 'uid_value', 'channel': 'channel_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'activation_token': 'activation_token_value', 'labels': {}} + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = eventarc.CreateChannelConnectionRequest.meta.fields["channel_connection"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["channel_connection"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + {"field": field, "subfield": subfield, "is_repeated": is_repeated} + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + 
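+ # Hypothetical example: an entry such as {"field": "create_time", "subfield": "nanos", "is_repeated": False} would cause request_init["channel_connection"]["create_time"]["nanos"] to be deleted below, keeping the sample request constructible against the runtime protos.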
if subfield: + if field_repeated: + for i in range(0, len(request_init["channel_connection"][field])): + del request_init["channel_connection"][field][i][subfield] + else: + del request_init["channel_connection"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.create_channel_connection(request) + + # Establish that the response is the type that we expect. + json_return_value = json_format.MessageToJson(return_value) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_channel_connection_rest_interceptors(null_interceptor): + transport = transports.EventarcRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.EventarcRestInterceptor(), + ) + client = EventarcClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(operation.Operation, "_set_result_from_operation"), \ + mock.patch.object(transports.EventarcRestInterceptor, "post_create_channel_connection") as post, \ + mock.patch.object(transports.EventarcRestInterceptor, "post_create_channel_connection_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.EventarcRestInterceptor, "pre_create_channel_connection") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = eventarc.CreateChannelConnectionRequest.pb(eventarc.CreateChannelConnectionRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.content = return_value + + request = eventarc.CreateChannelConnectionRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata + + client.create_channel_connection(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_delete_channel_connection_rest_bad_request(request_type=eventarc.DeleteChannelConnectionRequest): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/channelConnections/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.delete_channel_connection(request) + + +@pytest.mark.parametrize("request_type", [ + eventarc.DeleteChannelConnectionRequest, + dict, +]) +def test_delete_channel_connection_rest_call_success(request_type): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/channelConnections/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.delete_channel_connection(request) + + # Establish that the response is the type that we expect. + json_return_value = json_format.MessageToJson(return_value) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_channel_connection_rest_interceptors(null_interceptor): + transport = transports.EventarcRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.EventarcRestInterceptor(), + ) + client = EventarcClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(operation.Operation, "_set_result_from_operation"), \ + mock.patch.object(transports.EventarcRestInterceptor, "post_delete_channel_connection") as post, \ + mock.patch.object(transports.EventarcRestInterceptor, "post_delete_channel_connection_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.EventarcRestInterceptor, "pre_delete_channel_connection") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = eventarc.DeleteChannelConnectionRequest.pb(eventarc.DeleteChannelConnectionRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.content = return_value + + request = eventarc.DeleteChannelConnectionRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata + + 
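+ # The call below should flow through pre_delete_channel_connection, the mocked HTTP request, then post_delete_channel_connection and post_delete_channel_connection_with_metadata, each exactly once.
+ # For orientation, a user-supplied interceptor would hook into the same points; a rough sketch inferred from how this test drives the mocks (exact generated signatures may differ):
+ #   class AuditInterceptor(transports.EventarcRestInterceptor):
+ #       def pre_delete_channel_connection(self, request, metadata):
+ #           return request, metadata      # inspect or rewrite the outgoing request
+ #       def post_delete_channel_connection(self, response):
+ #           return response               # inspect the raw operations_pb2.Operation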
client.delete_channel_connection(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_get_google_channel_config_rest_bad_request(request_type=eventarc.GetGoogleChannelConfigRequest): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/googleChannelConfig'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_google_channel_config(request) + + +@pytest.mark.parametrize("request_type", [ + eventarc.GetGoogleChannelConfigRequest, + dict, +]) +def test_get_google_channel_config_rest_call_success(request_type): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/googleChannelConfig'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = google_channel_config.GoogleChannelConfig( + name='name_value', + crypto_key_name='crypto_key_name_value', + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = google_channel_config.GoogleChannelConfig.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.get_google_channel_config(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, google_channel_config.GoogleChannelConfig) + assert response.name == 'name_value' + assert response.crypto_key_name == 'crypto_key_name_value' + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_google_channel_config_rest_interceptors(null_interceptor): + transport = transports.EventarcRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.EventarcRestInterceptor(), + ) + client = EventarcClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.EventarcRestInterceptor, "post_get_google_channel_config") as post, \ + mock.patch.object(transports.EventarcRestInterceptor, "post_get_google_channel_config_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.EventarcRestInterceptor, "pre_get_google_channel_config") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = eventarc.GetGoogleChannelConfigRequest.pb(eventarc.GetGoogleChannelConfigRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = google_channel_config.GoogleChannelConfig.to_json(google_channel_config.GoogleChannelConfig()) + req.return_value.content = return_value + + request = eventarc.GetGoogleChannelConfigRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = google_channel_config.GoogleChannelConfig() + post_with_metadata.return_value = google_channel_config.GoogleChannelConfig(), metadata + + client.get_google_channel_config(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_update_google_channel_config_rest_bad_request(request_type=eventarc.UpdateGoogleChannelConfigRequest): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'google_channel_config': {'name': 'projects/sample1/locations/sample2/googleChannelConfig'}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.update_google_channel_config(request) + + +@pytest.mark.parametrize("request_type", [ + eventarc.UpdateGoogleChannelConfigRequest, + dict, +]) +def test_update_google_channel_config_rest_call_success(request_type): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'google_channel_config': {'name': 'projects/sample1/locations/sample2/googleChannelConfig'}} + request_init["google_channel_config"] = {'name': 'projects/sample1/locations/sample2/googleChannelConfig', 'update_time': {'seconds': 751, 'nanos': 543}, 'crypto_key_name': 'crypto_key_name_value', 'labels': {}} + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = eventarc.UpdateGoogleChannelConfigRequest.meta.fields["google_channel_config"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["google_channel_config"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + {"field": field, "subfield": subfield, "is_repeated": is_repeated} + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = 
subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["google_channel_config"][field])): + del request_init["google_channel_config"][field][i][subfield] + else: + del request_init["google_channel_config"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = gce_google_channel_config.GoogleChannelConfig( + name='name_value', + crypto_key_name='crypto_key_name_value', + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = gce_google_channel_config.GoogleChannelConfig.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.update_google_channel_config(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, gce_google_channel_config.GoogleChannelConfig) + assert response.name == 'name_value' + assert response.crypto_key_name == 'crypto_key_name_value' + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_google_channel_config_rest_interceptors(null_interceptor): + transport = transports.EventarcRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.EventarcRestInterceptor(), + ) + client = EventarcClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.EventarcRestInterceptor, "post_update_google_channel_config") as post, \ + mock.patch.object(transports.EventarcRestInterceptor, "post_update_google_channel_config_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.EventarcRestInterceptor, "pre_update_google_channel_config") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = eventarc.UpdateGoogleChannelConfigRequest.pb(eventarc.UpdateGoogleChannelConfigRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = gce_google_channel_config.GoogleChannelConfig.to_json(gce_google_channel_config.GoogleChannelConfig()) + req.return_value.content = return_value + + request = eventarc.UpdateGoogleChannelConfigRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = gce_google_channel_config.GoogleChannelConfig() + post_with_metadata.return_value = gce_google_channel_config.GoogleChannelConfig(), metadata + + client.update_google_channel_config(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_get_message_bus_rest_bad_request(request_type=eventarc.GetMessageBusRequest): + client = 
EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/messageBuses/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_message_bus(request) + + +@pytest.mark.parametrize("request_type", [ + eventarc.GetMessageBusRequest, + dict, +]) +def test_get_message_bus_rest_call_success(request_type): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/messageBuses/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = message_bus.MessageBus( + name='name_value', + uid='uid_value', + etag='etag_value', + display_name='display_name_value', + crypto_key_name='crypto_key_name_value', + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = message_bus.MessageBus.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.get_message_bus(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, message_bus.MessageBus) + assert response.name == 'name_value' + assert response.uid == 'uid_value' + assert response.etag == 'etag_value' + assert response.display_name == 'display_name_value' + assert response.crypto_key_name == 'crypto_key_name_value' + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_message_bus_rest_interceptors(null_interceptor): + transport = transports.EventarcRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.EventarcRestInterceptor(), + ) + client = EventarcClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.EventarcRestInterceptor, "post_get_message_bus") as post, \ + mock.patch.object(transports.EventarcRestInterceptor, "post_get_message_bus_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.EventarcRestInterceptor, "pre_get_message_bus") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = eventarc.GetMessageBusRequest.pb(eventarc.GetMessageBusRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = message_bus.MessageBus.to_json(message_bus.MessageBus()) + req.return_value.content = return_value + + request = eventarc.GetMessageBusRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = message_bus.MessageBus() + post_with_metadata.return_value = message_bus.MessageBus(), metadata + + client.get_message_bus(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_list_message_buses_rest_bad_request(request_type=eventarc.ListMessageBusesRequest): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_message_buses(request) + + +@pytest.mark.parametrize("request_type", [ + eventarc.ListMessageBusesRequest, + dict, +]) +def test_list_message_buses_rest_call_success(request_type): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
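+ # The expected proto-plus ListMessageBusesResponse is converted to its protobuf form and serialized with MessageToJson, so the patched session hands back bytes the REST transport can parse exactly as if they had arrived over the wire.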
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = eventarc.ListMessageBusesResponse( + next_page_token='next_page_token_value', + unreachable=['unreachable_value'], + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = eventarc.ListMessageBusesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.list_message_buses(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListMessageBusesPager) + assert response.next_page_token == 'next_page_token_value' + assert response.unreachable == ['unreachable_value'] + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_message_buses_rest_interceptors(null_interceptor): + transport = transports.EventarcRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.EventarcRestInterceptor(), + ) + client = EventarcClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.EventarcRestInterceptor, "post_list_message_buses") as post, \ + mock.patch.object(transports.EventarcRestInterceptor, "post_list_message_buses_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.EventarcRestInterceptor, "pre_list_message_buses") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = eventarc.ListMessageBusesRequest.pb(eventarc.ListMessageBusesRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = eventarc.ListMessageBusesResponse.to_json(eventarc.ListMessageBusesResponse()) + req.return_value.content = return_value + + request = eventarc.ListMessageBusesRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = eventarc.ListMessageBusesResponse() + post_with_metadata.return_value = eventarc.ListMessageBusesResponse(), metadata + + client.list_message_buses(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_list_message_bus_enrollments_rest_bad_request(request_type=eventarc.ListMessageBusEnrollmentsRequest): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2/messageBuses/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_message_bus_enrollments(request) + + +@pytest.mark.parametrize("request_type", [ + eventarc.ListMessageBusEnrollmentsRequest, + dict, +]) +def test_list_message_bus_enrollments_rest_call_success(request_type): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2/messageBuses/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = eventarc.ListMessageBusEnrollmentsResponse( + enrollments=['enrollments_value'], + next_page_token='next_page_token_value', + unreachable=['unreachable_value'], + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = eventarc.ListMessageBusEnrollmentsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.list_message_bus_enrollments(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListMessageBusEnrollmentsPager) + assert response.enrollments == ['enrollments_value'] + assert response.next_page_token == 'next_page_token_value' + assert response.unreachable == ['unreachable_value'] + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_message_bus_enrollments_rest_interceptors(null_interceptor): + transport = transports.EventarcRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.EventarcRestInterceptor(), + ) + client = EventarcClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.EventarcRestInterceptor, "post_list_message_bus_enrollments") as post, \ + mock.patch.object(transports.EventarcRestInterceptor, "post_list_message_bus_enrollments_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.EventarcRestInterceptor, "pre_list_message_bus_enrollments") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = eventarc.ListMessageBusEnrollmentsRequest.pb(eventarc.ListMessageBusEnrollmentsRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = eventarc.ListMessageBusEnrollmentsResponse.to_json(eventarc.ListMessageBusEnrollmentsResponse()) + req.return_value.content = return_value + + request = eventarc.ListMessageBusEnrollmentsRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = eventarc.ListMessageBusEnrollmentsResponse() + post_with_metadata.return_value = eventarc.ListMessageBusEnrollmentsResponse(), metadata + + client.list_message_bus_enrollments(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_create_message_bus_rest_bad_request(request_type=eventarc.CreateMessageBusRequest): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
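+ # The underlying requests Session.request is patched to hand back a fabricated 400 response, which the client is expected to surface as core_exceptions.BadRequest.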
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.create_message_bus(request) + + +@pytest.mark.parametrize("request_type", [ + eventarc.CreateMessageBusRequest, + dict, +]) +def test_create_message_bus_rest_call_success(request_type): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2'} + request_init["message_bus"] = {'name': 'name_value', 'uid': 'uid_value', 'etag': 'etag_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'labels': {}, 'annotations': {}, 'display_name': 'display_name_value', 'crypto_key_name': 'crypto_key_name_value', 'logging_config': {'log_severity': 1}} + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = eventarc.CreateMessageBusRequest.meta.fields["message_bus"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["message_bus"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + {"field": field, "subfield": subfield, "is_repeated": is_repeated} + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = 
subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["message_bus"][field])): + del request_init["message_bus"][field][i][subfield] + else: + del request_init["message_bus"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.create_message_bus(request) + + # Establish that the response is the type that we expect. + json_return_value = json_format.MessageToJson(return_value) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_message_bus_rest_interceptors(null_interceptor): + transport = transports.EventarcRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.EventarcRestInterceptor(), + ) + client = EventarcClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(operation.Operation, "_set_result_from_operation"), \ + mock.patch.object(transports.EventarcRestInterceptor, "post_create_message_bus") as post, \ + mock.patch.object(transports.EventarcRestInterceptor, "post_create_message_bus_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.EventarcRestInterceptor, "pre_create_message_bus") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = eventarc.CreateMessageBusRequest.pb(eventarc.CreateMessageBusRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.content = return_value + + request = eventarc.CreateMessageBusRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata + + client.create_message_bus(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_update_message_bus_rest_bad_request(request_type=eventarc.UpdateMessageBusRequest): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'message_bus': {'name': 'projects/sample1/locations/sample2/messageBuses/sample3'}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.update_message_bus(request) + + +@pytest.mark.parametrize("request_type", [ + eventarc.UpdateMessageBusRequest, + dict, +]) +def test_update_message_bus_rest_call_success(request_type): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'message_bus': {'name': 'projects/sample1/locations/sample2/messageBuses/sample3'}} + request_init["message_bus"] = {'name': 'projects/sample1/locations/sample2/messageBuses/sample3', 'uid': 'uid_value', 'etag': 'etag_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'labels': {}, 'annotations': {}, 'display_name': 'display_name_value', 'crypto_key_name': 'crypto_key_name_value', 'logging_config': {'log_severity': 1}} + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = eventarc.UpdateMessageBusRequest.meta.fields["message_bus"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["message_bus"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + {"field": field, "subfield": subfield, "is_repeated": is_repeated} + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["message_bus"][field])): + del request_init["message_bus"][field][i][subfield] + else: + del request_init["message_bus"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.update_message_bus(request) + + # Establish that the response is the type that we expect. 
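+ # update_message_bus is a long-running operation, so the mocked body is a raw operations_pb2.Operation; the generated check below only re-serializes it with MessageToJson rather than asserting individual response fields.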
+ json_return_value = json_format.MessageToJson(return_value) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_message_bus_rest_interceptors(null_interceptor): + transport = transports.EventarcRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.EventarcRestInterceptor(), + ) + client = EventarcClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(operation.Operation, "_set_result_from_operation"), \ + mock.patch.object(transports.EventarcRestInterceptor, "post_update_message_bus") as post, \ + mock.patch.object(transports.EventarcRestInterceptor, "post_update_message_bus_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.EventarcRestInterceptor, "pre_update_message_bus") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = eventarc.UpdateMessageBusRequest.pb(eventarc.UpdateMessageBusRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.content = return_value + + request = eventarc.UpdateMessageBusRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata + + client.update_message_bus(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - assert args[0] == request_msg + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_delete_channel_connection_empty_call_grpc_asyncio(): - client = EventarcAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", +def test_delete_message_bus_rest_bad_request(request_type=eventarc.DeleteMessageBusRequest): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" ) + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/messageBuses/sample3'} + request = request_type(**request_init) - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_channel_connection), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.delete_channel_connection(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = eventarc.DeleteChannelConnectionRequest() - - assert args[0] == request_msg + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.delete_message_bus(request) -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_get_google_channel_config_empty_call_grpc_asyncio(): - client = EventarcAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", +@pytest.mark.parametrize("request_type", [ + eventarc.DeleteMessageBusRequest, + dict, +]) +def test_delete_message_bus_rest_call_success(request_type): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" ) - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_google_channel_config), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(google_channel_config.GoogleChannelConfig( - name='name_value', - crypto_key_name='crypto_key_name_value', - )) - await client.get_google_channel_config(request=None) + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/messageBuses/sample3'} + request = request_type(**request_init) - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = eventarc.GetGoogleChannelConfigRequest() + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') - assert args[0] == request_msg + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.delete_message_bus(request) + # Establish that the response is the type that we expect. + json_return_value = json_format.MessageToJson(return_value) -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_update_google_channel_config_empty_call_grpc_asyncio(): - client = EventarcAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_google_channel_config), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gce_google_channel_config.GoogleChannelConfig( - name='name_value', - crypto_key_name='crypto_key_name_value', - )) - await client.update_google_channel_config(request=None) +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_message_bus_rest_interceptors(null_interceptor): + transport = transports.EventarcRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.EventarcRestInterceptor(), + ) + client = EventarcClient(transport=transport) - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = eventarc.UpdateGoogleChannelConfigRequest() + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(operation.Operation, "_set_result_from_operation"), \ + mock.patch.object(transports.EventarcRestInterceptor, "post_delete_message_bus") as post, \ + mock.patch.object(transports.EventarcRestInterceptor, "post_delete_message_bus_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.EventarcRestInterceptor, "pre_delete_message_bus") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = eventarc.DeleteMessageBusRequest.pb(eventarc.DeleteMessageBusRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } - assert args[0] == request_msg + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.content = return_value + request = eventarc.DeleteMessageBusRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata -def test_transport_kind_rest(): - transport = EventarcClient.get_transport_class("rest")( - credentials=ga_credentials.AnonymousCredentials() - ) - assert transport.kind == "rest" + client.delete_message_bus(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() -def test_get_trigger_rest_bad_request(request_type=eventarc.GetTriggerRequest): +def test_get_enrollment_rest_bad_request(request_type=eventarc.GetEnrollmentRequest): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/triggers/sample3'} + request_init = {'name': 'projects/sample1/locations/sample2/enrollments/sample3'} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -11828,32 +27521,34 @@ def test_get_trigger_rest_bad_request(request_type=eventarc.GetTriggerRequest): response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_trigger(request) + client.get_enrollment(request) @pytest.mark.parametrize("request_type", [ - eventarc.GetTriggerRequest, + eventarc.GetEnrollmentRequest, dict, ]) -def test_get_trigger_rest_call_success(request_type): +def test_get_enrollment_rest_call_success(request_type): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/triggers/sample3'} + request_init = {'name': 'projects/sample1/locations/sample2/enrollments/sample3'} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), 'request') as req: # Designate an appropriate value for the returned response. - return_value = trigger.Trigger( + return_value = enrollment.Enrollment( name='name_value', uid='uid_value', - service_account='service_account_value', - channel='channel_value', etag='etag_value', + display_name='display_name_value', + cel_match='cel_match_value', + message_bus='message_bus_value', + destination='destination_value', ) # Wrap the value into a proper Response obj @@ -11861,24 +27556,26 @@ def test_get_trigger_rest_call_success(request_type): response_value.status_code = 200 # Convert return value to protobuf type - return_value = trigger.Trigger.pb(return_value) + return_value = enrollment.Enrollment.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode('UTF-8') req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_trigger(request) + response = client.get_enrollment(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, trigger.Trigger) + assert isinstance(response, enrollment.Enrollment) assert response.name == 'name_value' assert response.uid == 'uid_value' - assert response.service_account == 'service_account_value' - assert response.channel == 'channel_value' assert response.etag == 'etag_value' + assert response.display_name == 'display_name_value' + assert response.cel_match == 'cel_match_value' + assert response.message_bus == 'message_bus_value' + assert response.destination == 'destination_value' @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_trigger_rest_interceptors(null_interceptor): +def test_get_enrollment_rest_interceptors(null_interceptor): transport = transports.EventarcRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.EventarcRestInterceptor(), @@ -11887,13 +27584,13 @@ def test_get_trigger_rest_interceptors(null_interceptor): with mock.patch.object(type(client.transport._session), "request") as req, \ mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.EventarcRestInterceptor, "post_get_trigger") as post, \ - mock.patch.object(transports.EventarcRestInterceptor, "post_get_trigger_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.EventarcRestInterceptor, "pre_get_trigger") as pre: + mock.patch.object(transports.EventarcRestInterceptor, "post_get_enrollment") as post, \ + mock.patch.object(transports.EventarcRestInterceptor, "post_get_enrollment_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.EventarcRestInterceptor, "pre_get_enrollment") as pre: pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = eventarc.GetTriggerRequest.pb(eventarc.GetTriggerRequest()) + pb_message = eventarc.GetEnrollmentRequest.pb(eventarc.GetEnrollmentRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -11904,26 +27601,26 @@ def test_get_trigger_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = trigger.Trigger.to_json(trigger.Trigger()) + return_value = enrollment.Enrollment.to_json(enrollment.Enrollment()) req.return_value.content = return_value - request = eventarc.GetTriggerRequest() + request = eventarc.GetEnrollmentRequest() metadata =[ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = trigger.Trigger() - post_with_metadata.return_value = trigger.Trigger(), metadata + post.return_value = enrollment.Enrollment() + post_with_metadata.return_value = enrollment.Enrollment(), metadata - client.get_trigger(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.get_enrollment(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) pre.assert_called_once() post.assert_called_once() post_with_metadata.assert_called_once() -def test_list_triggers_rest_bad_request(request_type=eventarc.ListTriggersRequest): +def test_list_enrollments_rest_bad_request(request_type=eventarc.ListEnrollmentsRequest): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" @@ -11942,14 +27639,14 @@ def test_list_triggers_rest_bad_request(request_type=eventarc.ListTriggersReques response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": 
"value-2"} - client.list_triggers(request) + client.list_enrollments(request) @pytest.mark.parametrize("request_type", [ - eventarc.ListTriggersRequest, + eventarc.ListEnrollmentsRequest, dict, ]) -def test_list_triggers_rest_call_success(request_type): +def test_list_enrollments_rest_call_success(request_type): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" @@ -11962,7 +27659,7 @@ def test_list_triggers_rest_call_success(request_type): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), 'request') as req: # Designate an appropriate value for the returned response. - return_value = eventarc.ListTriggersResponse( + return_value = eventarc.ListEnrollmentsResponse( next_page_token='next_page_token_value', unreachable=['unreachable_value'], ) @@ -11972,21 +27669,21 @@ def test_list_triggers_rest_call_success(request_type): response_value.status_code = 200 # Convert return value to protobuf type - return_value = eventarc.ListTriggersResponse.pb(return_value) + return_value = eventarc.ListEnrollmentsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode('UTF-8') req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_triggers(request) + response = client.list_enrollments(request) # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListTriggersPager) + assert isinstance(response, pagers.ListEnrollmentsPager) assert response.next_page_token == 'next_page_token_value' assert response.unreachable == ['unreachable_value'] @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_triggers_rest_interceptors(null_interceptor): +def test_list_enrollments_rest_interceptors(null_interceptor): transport = transports.EventarcRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.EventarcRestInterceptor(), @@ -11995,13 +27692,13 @@ def test_list_triggers_rest_interceptors(null_interceptor): with mock.patch.object(type(client.transport._session), "request") as req, \ mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.EventarcRestInterceptor, "post_list_triggers") as post, \ - mock.patch.object(transports.EventarcRestInterceptor, "post_list_triggers_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.EventarcRestInterceptor, "pre_list_triggers") as pre: + mock.patch.object(transports.EventarcRestInterceptor, "post_list_enrollments") as post, \ + mock.patch.object(transports.EventarcRestInterceptor, "post_list_enrollments_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.EventarcRestInterceptor, "pre_list_enrollments") as pre: pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = eventarc.ListTriggersRequest.pb(eventarc.ListTriggersRequest()) + pb_message = eventarc.ListEnrollmentsRequest.pb(eventarc.ListEnrollmentsRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -12012,26 +27709,26 @@ def test_list_triggers_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = 
eventarc.ListTriggersResponse.to_json(eventarc.ListTriggersResponse()) + return_value = eventarc.ListEnrollmentsResponse.to_json(eventarc.ListEnrollmentsResponse()) req.return_value.content = return_value - request = eventarc.ListTriggersRequest() + request = eventarc.ListEnrollmentsRequest() metadata =[ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = eventarc.ListTriggersResponse() - post_with_metadata.return_value = eventarc.ListTriggersResponse(), metadata + post.return_value = eventarc.ListEnrollmentsResponse() + post_with_metadata.return_value = eventarc.ListEnrollmentsResponse(), metadata - client.list_triggers(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.list_enrollments(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) pre.assert_called_once() post.assert_called_once() post_with_metadata.assert_called_once() -def test_create_trigger_rest_bad_request(request_type=eventarc.CreateTriggerRequest): +def test_create_enrollment_rest_bad_request(request_type=eventarc.CreateEnrollmentRequest): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" @@ -12050,14 +27747,14 @@ def test_create_trigger_rest_bad_request(request_type=eventarc.CreateTriggerRequ response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.create_trigger(request) + client.create_enrollment(request) @pytest.mark.parametrize("request_type", [ - eventarc.CreateTriggerRequest, + eventarc.CreateEnrollmentRequest, dict, ]) -def test_create_trigger_rest_call_success(request_type): +def test_create_enrollment_rest_call_success(request_type): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" @@ -12065,13 +27762,13 @@ def test_create_trigger_rest_call_success(request_type): # send a request that will satisfy transcoding request_init = {'parent': 'projects/sample1/locations/sample2'} - request_init["trigger"] = {'name': 'name_value', 'uid': 'uid_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'event_filters': [{'attribute': 'attribute_value', 'value': 'value_value', 'operator': 'operator_value'}], 'service_account': 'service_account_value', 'destination': {'cloud_run': {'service': 'service_value', 'path': 'path_value', 'region': 'region_value'}, 'cloud_function': 'cloud_function_value', 'gke': {'cluster': 'cluster_value', 'location': 'location_value', 'namespace': 'namespace_value', 'service': 'service_value', 'path': 'path_value'}, 'workflow': 'workflow_value'}, 'transport': {'pubsub': {'topic': 'topic_value', 'subscription': 'subscription_value'}}, 'labels': {}, 'channel': 'channel_value', 'conditions': {}, 'etag': 'etag_value'} + request_init["enrollment"] = {'name': 'name_value', 'uid': 'uid_value', 'etag': 'etag_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'labels': {}, 'annotations': {}, 'display_name': 'display_name_value', 'cel_match': 'cel_match_value', 'message_bus': 'message_bus_value', 'destination': 'destination_value'} # The version of a generated dependency at test runtime may differ from the version used during generation. 
# Delete any fields which are not present in the current runtime dependency # See https://github.com/googleapis/gapic-generator-python/issues/1748 # Determine if the message type is proto-plus or protobuf - test_field = eventarc.CreateTriggerRequest.meta.fields["trigger"] + test_field = eventarc.CreateEnrollmentRequest.meta.fields["enrollment"] def get_message_fields(field): # Given a field which is a message (composite type), return a list with @@ -12099,7 +27796,7 @@ def get_message_fields(field): # For each item in the sample request, create a list of sub fields which are not present at runtime # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["trigger"].items(): # pragma: NO COVER + for field, value in request_init["enrollment"].items(): # pragma: NO COVER result = None is_repeated = False # For repeated fields @@ -12125,10 +27822,10 @@ def get_message_fields(field): subfield = subfield_to_delete.get("subfield") if subfield: if field_repeated: - for i in range(0, len(request_init["trigger"][field])): - del request_init["trigger"][field][i][subfield] + for i in range(0, len(request_init["enrollment"][field])): + del request_init["enrollment"][field][i][subfield] else: - del request_init["trigger"][field][subfield] + del request_init["enrollment"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. @@ -12143,14 +27840,14 @@ def get_message_fields(field): response_value.content = json_return_value.encode('UTF-8') req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.create_trigger(request) + response = client.create_enrollment(request) # Establish that the response is the type that we expect. 
json_return_value = json_format.MessageToJson(return_value) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_trigger_rest_interceptors(null_interceptor): +def test_create_enrollment_rest_interceptors(null_interceptor): transport = transports.EventarcRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.EventarcRestInterceptor(), @@ -12160,13 +27857,13 @@ def test_create_trigger_rest_interceptors(null_interceptor): with mock.patch.object(type(client.transport._session), "request") as req, \ mock.patch.object(path_template, "transcode") as transcode, \ mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.EventarcRestInterceptor, "post_create_trigger") as post, \ - mock.patch.object(transports.EventarcRestInterceptor, "post_create_trigger_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.EventarcRestInterceptor, "pre_create_trigger") as pre: + mock.patch.object(transports.EventarcRestInterceptor, "post_create_enrollment") as post, \ + mock.patch.object(transports.EventarcRestInterceptor, "post_create_enrollment_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.EventarcRestInterceptor, "pre_create_enrollment") as pre: pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = eventarc.CreateTriggerRequest.pb(eventarc.CreateTriggerRequest()) + pb_message = eventarc.CreateEnrollmentRequest.pb(eventarc.CreateEnrollmentRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -12180,7 +27877,7 @@ def test_create_trigger_rest_interceptors(null_interceptor): return_value = json_format.MessageToJson(operations_pb2.Operation()) req.return_value.content = return_value - request = eventarc.CreateTriggerRequest() + request = eventarc.CreateEnrollmentRequest() metadata =[ ("key", "val"), ("cephalopod", "squid"), @@ -12189,20 +27886,20 @@ def test_create_trigger_rest_interceptors(null_interceptor): post.return_value = operations_pb2.Operation() post_with_metadata.return_value = operations_pb2.Operation(), metadata - client.create_trigger(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.create_enrollment(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) pre.assert_called_once() post.assert_called_once() post_with_metadata.assert_called_once() -def test_update_trigger_rest_bad_request(request_type=eventarc.UpdateTriggerRequest): +def test_update_enrollment_rest_bad_request(request_type=eventarc.UpdateEnrollmentRequest): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'trigger': {'name': 'projects/sample1/locations/sample2/triggers/sample3'}} + request_init = {'enrollment': {'name': 'projects/sample1/locations/sample2/enrollments/sample3'}} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -12215,28 +27912,28 @@ def test_update_trigger_rest_bad_request(request_type=eventarc.UpdateTriggerRequ response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.update_trigger(request) + client.update_enrollment(request) @pytest.mark.parametrize("request_type", [ - eventarc.UpdateTriggerRequest, + eventarc.UpdateEnrollmentRequest, dict, ]) -def test_update_trigger_rest_call_success(request_type): +def test_update_enrollment_rest_call_success(request_type): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'trigger': {'name': 'projects/sample1/locations/sample2/triggers/sample3'}} - request_init["trigger"] = {'name': 'projects/sample1/locations/sample2/triggers/sample3', 'uid': 'uid_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'event_filters': [{'attribute': 'attribute_value', 'value': 'value_value', 'operator': 'operator_value'}], 'service_account': 'service_account_value', 'destination': {'cloud_run': {'service': 'service_value', 'path': 'path_value', 'region': 'region_value'}, 'cloud_function': 'cloud_function_value', 'gke': {'cluster': 'cluster_value', 'location': 'location_value', 'namespace': 'namespace_value', 'service': 'service_value', 'path': 'path_value'}, 'workflow': 'workflow_value'}, 'transport': {'pubsub': {'topic': 'topic_value', 'subscription': 'subscription_value'}}, 'labels': {}, 'channel': 'channel_value', 'conditions': {}, 'etag': 'etag_value'} + request_init = {'enrollment': {'name': 'projects/sample1/locations/sample2/enrollments/sample3'}} + request_init["enrollment"] = {'name': 'projects/sample1/locations/sample2/enrollments/sample3', 'uid': 'uid_value', 'etag': 'etag_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'labels': {}, 'annotations': {}, 'display_name': 'display_name_value', 'cel_match': 'cel_match_value', 'message_bus': 'message_bus_value', 'destination': 'destination_value'} # The version of a generated dependency at test runtime may differ from the version used during generation. 
# Delete any fields which are not present in the current runtime dependency # See https://github.com/googleapis/gapic-generator-python/issues/1748 # Determine if the message type is proto-plus or protobuf - test_field = eventarc.UpdateTriggerRequest.meta.fields["trigger"] + test_field = eventarc.UpdateEnrollmentRequest.meta.fields["enrollment"] def get_message_fields(field): # Given a field which is a message (composite type), return a list with @@ -12264,7 +27961,7 @@ def get_message_fields(field): # For each item in the sample request, create a list of sub fields which are not present at runtime # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["trigger"].items(): # pragma: NO COVER + for field, value in request_init["enrollment"].items(): # pragma: NO COVER result = None is_repeated = False # For repeated fields @@ -12290,10 +27987,10 @@ def get_message_fields(field): subfield = subfield_to_delete.get("subfield") if subfield: if field_repeated: - for i in range(0, len(request_init["trigger"][field])): - del request_init["trigger"][field][i][subfield] + for i in range(0, len(request_init["enrollment"][field])): + del request_init["enrollment"][field][i][subfield] else: - del request_init["trigger"][field][subfield] + del request_init["enrollment"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. @@ -12308,14 +28005,14 @@ def get_message_fields(field): response_value.content = json_return_value.encode('UTF-8') req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.update_trigger(request) + response = client.update_enrollment(request) # Establish that the response is the type that we expect. 
json_return_value = json_format.MessageToJson(return_value) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_trigger_rest_interceptors(null_interceptor): +def test_update_enrollment_rest_interceptors(null_interceptor): transport = transports.EventarcRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.EventarcRestInterceptor(), @@ -12325,13 +28022,13 @@ def test_update_trigger_rest_interceptors(null_interceptor): with mock.patch.object(type(client.transport._session), "request") as req, \ mock.patch.object(path_template, "transcode") as transcode, \ mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.EventarcRestInterceptor, "post_update_trigger") as post, \ - mock.patch.object(transports.EventarcRestInterceptor, "post_update_trigger_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.EventarcRestInterceptor, "pre_update_trigger") as pre: + mock.patch.object(transports.EventarcRestInterceptor, "post_update_enrollment") as post, \ + mock.patch.object(transports.EventarcRestInterceptor, "post_update_enrollment_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.EventarcRestInterceptor, "pre_update_enrollment") as pre: pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = eventarc.UpdateTriggerRequest.pb(eventarc.UpdateTriggerRequest()) + pb_message = eventarc.UpdateEnrollmentRequest.pb(eventarc.UpdateEnrollmentRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -12345,7 +28042,7 @@ def test_update_trigger_rest_interceptors(null_interceptor): return_value = json_format.MessageToJson(operations_pb2.Operation()) req.return_value.content = return_value - request = eventarc.UpdateTriggerRequest() + request = eventarc.UpdateEnrollmentRequest() metadata =[ ("key", "val"), ("cephalopod", "squid"), @@ -12354,20 +28051,20 @@ def test_update_trigger_rest_interceptors(null_interceptor): post.return_value = operations_pb2.Operation() post_with_metadata.return_value = operations_pb2.Operation(), metadata - client.update_trigger(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.update_enrollment(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) pre.assert_called_once() post.assert_called_once() post_with_metadata.assert_called_once() -def test_delete_trigger_rest_bad_request(request_type=eventarc.DeleteTriggerRequest): +def test_delete_enrollment_rest_bad_request(request_type=eventarc.DeleteEnrollmentRequest): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/triggers/sample3'} + request_init = {'name': 'projects/sample1/locations/sample2/enrollments/sample3'} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -12380,21 +28077,21 @@ def test_delete_trigger_rest_bad_request(request_type=eventarc.DeleteTriggerRequ response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.delete_trigger(request) + client.delete_enrollment(request) @pytest.mark.parametrize("request_type", [ - eventarc.DeleteTriggerRequest, + eventarc.DeleteEnrollmentRequest, dict, ]) -def test_delete_trigger_rest_call_success(request_type): +def test_delete_enrollment_rest_call_success(request_type): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/triggers/sample3'} + request_init = {'name': 'projects/sample1/locations/sample2/enrollments/sample3'} request = request_type(**request_init) # Mock the http request call within the method and fake a response. @@ -12409,14 +28106,14 @@ def test_delete_trigger_rest_call_success(request_type): response_value.content = json_return_value.encode('UTF-8') req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.delete_trigger(request) + response = client.delete_enrollment(request) # Establish that the response is the type that we expect. json_return_value = json_format.MessageToJson(return_value) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_trigger_rest_interceptors(null_interceptor): +def test_delete_enrollment_rest_interceptors(null_interceptor): transport = transports.EventarcRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.EventarcRestInterceptor(), @@ -12426,13 +28123,13 @@ def test_delete_trigger_rest_interceptors(null_interceptor): with mock.patch.object(type(client.transport._session), "request") as req, \ mock.patch.object(path_template, "transcode") as transcode, \ mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.EventarcRestInterceptor, "post_delete_trigger") as post, \ - mock.patch.object(transports.EventarcRestInterceptor, "post_delete_trigger_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.EventarcRestInterceptor, "pre_delete_trigger") as pre: + mock.patch.object(transports.EventarcRestInterceptor, "post_delete_enrollment") as post, \ + mock.patch.object(transports.EventarcRestInterceptor, "post_delete_enrollment_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.EventarcRestInterceptor, "pre_delete_enrollment") as pre: pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = eventarc.DeleteTriggerRequest.pb(eventarc.DeleteTriggerRequest()) + pb_message = eventarc.DeleteEnrollmentRequest.pb(eventarc.DeleteEnrollmentRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -12446,7 +28143,7 @@ def test_delete_trigger_rest_interceptors(null_interceptor): return_value = json_format.MessageToJson(operations_pb2.Operation()) req.return_value.content = return_value - request = eventarc.DeleteTriggerRequest() + request = eventarc.DeleteEnrollmentRequest() metadata =[ ("key", "val"), ("cephalopod", "squid"), @@ -12455,20 +28152,20 @@ def test_delete_trigger_rest_interceptors(null_interceptor): post.return_value = operations_pb2.Operation() post_with_metadata.return_value = operations_pb2.Operation(), metadata 
- client.delete_trigger(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.delete_enrollment(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) pre.assert_called_once() post.assert_called_once() post_with_metadata.assert_called_once() -def test_get_channel_rest_bad_request(request_type=eventarc.GetChannelRequest): +def test_get_pipeline_rest_bad_request(request_type=eventarc.GetPipelineRequest): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/channels/sample3'} + request_init = {'name': 'projects/sample1/locations/sample2/pipelines/sample3'} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -12481,34 +28178,33 @@ def test_get_channel_rest_bad_request(request_type=eventarc.GetChannelRequest): response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_channel(request) + client.get_pipeline(request) @pytest.mark.parametrize("request_type", [ - eventarc.GetChannelRequest, + eventarc.GetPipelineRequest, dict, ]) -def test_get_channel_rest_call_success(request_type): +def test_get_pipeline_rest_call_success(request_type): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/channels/sample3'} + request_init = {'name': 'projects/sample1/locations/sample2/pipelines/sample3'} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), 'request') as req: # Designate an appropriate value for the returned response. - return_value = channel.Channel( + return_value = pipeline.Pipeline( name='name_value', uid='uid_value', - provider='provider_value', - state=channel.Channel.State.PENDING, - activation_token='activation_token_value', + display_name='display_name_value', crypto_key_name='crypto_key_name_value', - pubsub_topic='pubsub_topic_value', + etag='etag_value', + satisfies_pzs=True, ) # Wrap the value into a proper Response obj @@ -12516,25 +28212,25 @@ def test_get_channel_rest_call_success(request_type): response_value.status_code = 200 # Convert return value to protobuf type - return_value = channel.Channel.pb(return_value) + return_value = pipeline.Pipeline.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode('UTF-8') req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_channel(request) + response = client.get_pipeline(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, channel.Channel) + assert isinstance(response, pipeline.Pipeline) assert response.name == 'name_value' assert response.uid == 'uid_value' - assert response.provider == 'provider_value' - assert response.state == channel.Channel.State.PENDING - assert response.activation_token == 'activation_token_value' + assert response.display_name == 'display_name_value' assert response.crypto_key_name == 'crypto_key_name_value' + assert response.etag == 'etag_value' + assert response.satisfies_pzs is True @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_channel_rest_interceptors(null_interceptor): +def test_get_pipeline_rest_interceptors(null_interceptor): transport = transports.EventarcRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.EventarcRestInterceptor(), @@ -12543,13 +28239,13 @@ def test_get_channel_rest_interceptors(null_interceptor): with mock.patch.object(type(client.transport._session), "request") as req, \ mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.EventarcRestInterceptor, "post_get_channel") as post, \ - mock.patch.object(transports.EventarcRestInterceptor, "post_get_channel_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.EventarcRestInterceptor, "pre_get_channel") as pre: + mock.patch.object(transports.EventarcRestInterceptor, "post_get_pipeline") as post, \ + mock.patch.object(transports.EventarcRestInterceptor, "post_get_pipeline_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.EventarcRestInterceptor, "pre_get_pipeline") as pre: pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = eventarc.GetChannelRequest.pb(eventarc.GetChannelRequest()) + pb_message = eventarc.GetPipelineRequest.pb(eventarc.GetPipelineRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -12560,26 +28256,26 @@ def test_get_channel_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = channel.Channel.to_json(channel.Channel()) + return_value = pipeline.Pipeline.to_json(pipeline.Pipeline()) req.return_value.content = return_value - request = eventarc.GetChannelRequest() + request = eventarc.GetPipelineRequest() metadata =[ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = channel.Channel() - post_with_metadata.return_value = channel.Channel(), metadata + post.return_value = pipeline.Pipeline() + post_with_metadata.return_value = pipeline.Pipeline(), metadata - client.get_channel(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.get_pipeline(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) pre.assert_called_once() post.assert_called_once() post_with_metadata.assert_called_once() -def test_list_channels_rest_bad_request(request_type=eventarc.ListChannelsRequest): +def test_list_pipelines_rest_bad_request(request_type=eventarc.ListPipelinesRequest): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" @@ -12598,14 +28294,14 @@ def test_list_channels_rest_bad_request(request_type=eventarc.ListChannelsReques response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - 
client.list_channels(request) + client.list_pipelines(request) @pytest.mark.parametrize("request_type", [ - eventarc.ListChannelsRequest, + eventarc.ListPipelinesRequest, dict, ]) -def test_list_channels_rest_call_success(request_type): +def test_list_pipelines_rest_call_success(request_type): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" @@ -12618,7 +28314,7 @@ def test_list_channels_rest_call_success(request_type): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), 'request') as req: # Designate an appropriate value for the returned response. - return_value = eventarc.ListChannelsResponse( + return_value = eventarc.ListPipelinesResponse( next_page_token='next_page_token_value', unreachable=['unreachable_value'], ) @@ -12628,21 +28324,21 @@ def test_list_channels_rest_call_success(request_type): response_value.status_code = 200 # Convert return value to protobuf type - return_value = eventarc.ListChannelsResponse.pb(return_value) + return_value = eventarc.ListPipelinesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode('UTF-8') req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_channels(request) + response = client.list_pipelines(request) # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListChannelsPager) + assert isinstance(response, pagers.ListPipelinesPager) assert response.next_page_token == 'next_page_token_value' assert response.unreachable == ['unreachable_value'] @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_channels_rest_interceptors(null_interceptor): +def test_list_pipelines_rest_interceptors(null_interceptor): transport = transports.EventarcRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.EventarcRestInterceptor(), @@ -12651,13 +28347,13 @@ def test_list_channels_rest_interceptors(null_interceptor): with mock.patch.object(type(client.transport._session), "request") as req, \ mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.EventarcRestInterceptor, "post_list_channels") as post, \ - mock.patch.object(transports.EventarcRestInterceptor, "post_list_channels_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.EventarcRestInterceptor, "pre_list_channels") as pre: + mock.patch.object(transports.EventarcRestInterceptor, "post_list_pipelines") as post, \ + mock.patch.object(transports.EventarcRestInterceptor, "post_list_pipelines_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.EventarcRestInterceptor, "pre_list_pipelines") as pre: pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = eventarc.ListChannelsRequest.pb(eventarc.ListChannelsRequest()) + pb_message = eventarc.ListPipelinesRequest.pb(eventarc.ListPipelinesRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -12668,26 +28364,26 @@ def test_list_channels_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = eventarc.ListChannelsResponse.to_json(eventarc.ListChannelsResponse()) + return_value = 
eventarc.ListPipelinesResponse.to_json(eventarc.ListPipelinesResponse()) req.return_value.content = return_value - request = eventarc.ListChannelsRequest() + request = eventarc.ListPipelinesRequest() metadata =[ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = eventarc.ListChannelsResponse() - post_with_metadata.return_value = eventarc.ListChannelsResponse(), metadata + post.return_value = eventarc.ListPipelinesResponse() + post_with_metadata.return_value = eventarc.ListPipelinesResponse(), metadata - client.list_channels(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.list_pipelines(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) pre.assert_called_once() post.assert_called_once() post_with_metadata.assert_called_once() -def test_create_channel_rest_bad_request(request_type=eventarc.CreateChannelRequest): +def test_create_pipeline_rest_bad_request(request_type=eventarc.CreatePipelineRequest): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" @@ -12706,14 +28402,14 @@ def test_create_channel_rest_bad_request(request_type=eventarc.CreateChannelRequ response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.create_channel(request) + client.create_pipeline(request) @pytest.mark.parametrize("request_type", [ - eventarc.CreateChannelRequest, + eventarc.CreatePipelineRequest, dict, ]) -def test_create_channel_rest_call_success(request_type): +def test_create_pipeline_rest_call_success(request_type): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" @@ -12721,13 +28417,13 @@ def test_create_channel_rest_call_success(request_type): # send a request that will satisfy transcoding request_init = {'parent': 'projects/sample1/locations/sample2'} - request_init["channel"] = {'name': 'name_value', 'uid': 'uid_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'provider': 'provider_value', 'pubsub_topic': 'pubsub_topic_value', 'state': 1, 'activation_token': 'activation_token_value', 'crypto_key_name': 'crypto_key_name_value'} + request_init["pipeline"] = {'name': 'name_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'labels': {}, 'uid': 'uid_value', 'annotations': {}, 'display_name': 'display_name_value', 'destinations': [{'network_config': {'network_attachment': 'network_attachment_value'}, 'http_endpoint': {'uri': 'uri_value', 'message_binding_template': 'message_binding_template_value'}, 'workflow': 'workflow_value', 'message_bus': 'message_bus_value', 'topic': 'topic_value', 'authentication_config': {'google_oidc': {'service_account': 'service_account_value', 'audience': 'audience_value'}, 'oauth_token': {'service_account': 'service_account_value', 'scope': 'scope_value'}}, 'output_payload_format': {'protobuf': {'schema_definition': 'schema_definition_value'}, 'avro': {'schema_definition': 'schema_definition_value'}, 'json': {}}}], 'mediations': [{'transformation': {'transformation_template': 'transformation_template_value'}}], 'crypto_key_name': 'crypto_key_name_value', 'input_payload_format': {}, 'logging_config': {'log_severity': 1}, 'retry_policy': {'max_attempts': 1303, 'min_retry_delay': {'seconds': 751, 'nanos': 543}, 'max_retry_delay': {}}, 'etag': 'etag_value', 'satisfies_pzs': True} # The version of a generated dependency at test runtime may differ from the version used during 
generation. # Delete any fields which are not present in the current runtime dependency # See https://github.com/googleapis/gapic-generator-python/issues/1748 # Determine if the message type is proto-plus or protobuf - test_field = eventarc.CreateChannelRequest.meta.fields["channel"] + test_field = eventarc.CreatePipelineRequest.meta.fields["pipeline"] def get_message_fields(field): # Given a field which is a message (composite type), return a list with @@ -12755,7 +28451,7 @@ def get_message_fields(field): # For each item in the sample request, create a list of sub fields which are not present at runtime # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["channel"].items(): # pragma: NO COVER + for field, value in request_init["pipeline"].items(): # pragma: NO COVER result = None is_repeated = False # For repeated fields @@ -12781,10 +28477,10 @@ def get_message_fields(field): subfield = subfield_to_delete.get("subfield") if subfield: if field_repeated: - for i in range(0, len(request_init["channel"][field])): - del request_init["channel"][field][i][subfield] + for i in range(0, len(request_init["pipeline"][field])): + del request_init["pipeline"][field][i][subfield] else: - del request_init["channel"][field][subfield] + del request_init["pipeline"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. @@ -12799,14 +28495,14 @@ def get_message_fields(field): response_value.content = json_return_value.encode('UTF-8') req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.create_channel(request) + response = client.create_pipeline(request) # Establish that the response is the type that we expect. 
json_return_value = json_format.MessageToJson(return_value) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_channel_rest_interceptors(null_interceptor): +def test_create_pipeline_rest_interceptors(null_interceptor): transport = transports.EventarcRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.EventarcRestInterceptor(), @@ -12816,13 +28512,13 @@ def test_create_channel_rest_interceptors(null_interceptor): with mock.patch.object(type(client.transport._session), "request") as req, \ mock.patch.object(path_template, "transcode") as transcode, \ mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.EventarcRestInterceptor, "post_create_channel") as post, \ - mock.patch.object(transports.EventarcRestInterceptor, "post_create_channel_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.EventarcRestInterceptor, "pre_create_channel") as pre: + mock.patch.object(transports.EventarcRestInterceptor, "post_create_pipeline") as post, \ + mock.patch.object(transports.EventarcRestInterceptor, "post_create_pipeline_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.EventarcRestInterceptor, "pre_create_pipeline") as pre: pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = eventarc.CreateChannelRequest.pb(eventarc.CreateChannelRequest()) + pb_message = eventarc.CreatePipelineRequest.pb(eventarc.CreatePipelineRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -12836,7 +28532,7 @@ def test_create_channel_rest_interceptors(null_interceptor): return_value = json_format.MessageToJson(operations_pb2.Operation()) req.return_value.content = return_value - request = eventarc.CreateChannelRequest() + request = eventarc.CreatePipelineRequest() metadata =[ ("key", "val"), ("cephalopod", "squid"), @@ -12845,20 +28541,20 @@ def test_create_channel_rest_interceptors(null_interceptor): post.return_value = operations_pb2.Operation() post_with_metadata.return_value = operations_pb2.Operation(), metadata - client.create_channel(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.create_pipeline(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) pre.assert_called_once() post.assert_called_once() post_with_metadata.assert_called_once() -def test_update_channel_rest_bad_request(request_type=eventarc.UpdateChannelRequest): +def test_update_pipeline_rest_bad_request(request_type=eventarc.UpdatePipelineRequest): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'channel': {'name': 'projects/sample1/locations/sample2/channels/sample3'}} + request_init = {'pipeline': {'name': 'projects/sample1/locations/sample2/pipelines/sample3'}} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -12871,28 +28567,28 @@ def test_update_channel_rest_bad_request(request_type=eventarc.UpdateChannelRequ response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.update_channel(request) + client.update_pipeline(request) @pytest.mark.parametrize("request_type", [ - eventarc.UpdateChannelRequest, + eventarc.UpdatePipelineRequest, dict, ]) -def test_update_channel_rest_call_success(request_type): +def test_update_pipeline_rest_call_success(request_type): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'channel': {'name': 'projects/sample1/locations/sample2/channels/sample3'}} - request_init["channel"] = {'name': 'projects/sample1/locations/sample2/channels/sample3', 'uid': 'uid_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'provider': 'provider_value', 'pubsub_topic': 'pubsub_topic_value', 'state': 1, 'activation_token': 'activation_token_value', 'crypto_key_name': 'crypto_key_name_value'} + request_init = {'pipeline': {'name': 'projects/sample1/locations/sample2/pipelines/sample3'}} + request_init["pipeline"] = {'name': 'projects/sample1/locations/sample2/pipelines/sample3', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'labels': {}, 'uid': 'uid_value', 'annotations': {}, 'display_name': 'display_name_value', 'destinations': [{'network_config': {'network_attachment': 'network_attachment_value'}, 'http_endpoint': {'uri': 'uri_value', 'message_binding_template': 'message_binding_template_value'}, 'workflow': 'workflow_value', 'message_bus': 'message_bus_value', 'topic': 'topic_value', 'authentication_config': {'google_oidc': {'service_account': 'service_account_value', 'audience': 'audience_value'}, 'oauth_token': {'service_account': 'service_account_value', 'scope': 'scope_value'}}, 'output_payload_format': {'protobuf': {'schema_definition': 'schema_definition_value'}, 'avro': {'schema_definition': 'schema_definition_value'}, 'json': {}}}], 'mediations': [{'transformation': {'transformation_template': 'transformation_template_value'}}], 'crypto_key_name': 'crypto_key_name_value', 'input_payload_format': {}, 'logging_config': {'log_severity': 1}, 'retry_policy': {'max_attempts': 1303, 'min_retry_delay': {'seconds': 751, 'nanos': 543}, 'max_retry_delay': {}}, 'etag': 'etag_value', 'satisfies_pzs': True} # The version of a generated dependency at test runtime may differ from the version used during generation. 
# Delete any fields which are not present in the current runtime dependency # See https://github.com/googleapis/gapic-generator-python/issues/1748 # Determine if the message type is proto-plus or protobuf - test_field = eventarc.UpdateChannelRequest.meta.fields["channel"] + test_field = eventarc.UpdatePipelineRequest.meta.fields["pipeline"] def get_message_fields(field): # Given a field which is a message (composite type), return a list with @@ -12920,7 +28616,7 @@ def get_message_fields(field): # For each item in the sample request, create a list of sub fields which are not present at runtime # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["channel"].items(): # pragma: NO COVER + for field, value in request_init["pipeline"].items(): # pragma: NO COVER result = None is_repeated = False # For repeated fields @@ -12946,10 +28642,10 @@ def get_message_fields(field): subfield = subfield_to_delete.get("subfield") if subfield: if field_repeated: - for i in range(0, len(request_init["channel"][field])): - del request_init["channel"][field][i][subfield] + for i in range(0, len(request_init["pipeline"][field])): + del request_init["pipeline"][field][i][subfield] else: - del request_init["channel"][field][subfield] + del request_init["pipeline"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. @@ -12964,14 +28660,14 @@ def get_message_fields(field): response_value.content = json_return_value.encode('UTF-8') req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.update_channel(request) + response = client.update_pipeline(request) # Establish that the response is the type that we expect. 
json_return_value = json_format.MessageToJson(return_value) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_channel_rest_interceptors(null_interceptor): +def test_update_pipeline_rest_interceptors(null_interceptor): transport = transports.EventarcRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.EventarcRestInterceptor(), @@ -12981,13 +28677,13 @@ def test_update_channel_rest_interceptors(null_interceptor): with mock.patch.object(type(client.transport._session), "request") as req, \ mock.patch.object(path_template, "transcode") as transcode, \ mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.EventarcRestInterceptor, "post_update_channel") as post, \ - mock.patch.object(transports.EventarcRestInterceptor, "post_update_channel_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.EventarcRestInterceptor, "pre_update_channel") as pre: + mock.patch.object(transports.EventarcRestInterceptor, "post_update_pipeline") as post, \ + mock.patch.object(transports.EventarcRestInterceptor, "post_update_pipeline_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.EventarcRestInterceptor, "pre_update_pipeline") as pre: pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = eventarc.UpdateChannelRequest.pb(eventarc.UpdateChannelRequest()) + pb_message = eventarc.UpdatePipelineRequest.pb(eventarc.UpdatePipelineRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -13001,7 +28697,7 @@ def test_update_channel_rest_interceptors(null_interceptor): return_value = json_format.MessageToJson(operations_pb2.Operation()) req.return_value.content = return_value - request = eventarc.UpdateChannelRequest() + request = eventarc.UpdatePipelineRequest() metadata =[ ("key", "val"), ("cephalopod", "squid"), @@ -13010,20 +28706,20 @@ def test_update_channel_rest_interceptors(null_interceptor): post.return_value = operations_pb2.Operation() post_with_metadata.return_value = operations_pb2.Operation(), metadata - client.update_channel(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.update_pipeline(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) pre.assert_called_once() post.assert_called_once() post_with_metadata.assert_called_once() -def test_delete_channel_rest_bad_request(request_type=eventarc.DeleteChannelRequest): +def test_delete_pipeline_rest_bad_request(request_type=eventarc.DeletePipelineRequest): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/channels/sample3'} + request_init = {'name': 'projects/sample1/locations/sample2/pipelines/sample3'} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
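
# Illustrative sketch (hypothetical names, not the generated EventarcRestInterceptor API):
# the *_rest_interceptors tests in the hunks above all follow one pattern -- patch the
# interceptor's pre_<method> / post_<method> / post_<method>_with_metadata hooks, make a
# single call, and assert each hook ran exactly once.  A minimal standalone version of
# that flow, using ToyInterceptor and toy_update_pipeline as stand-ins:
from unittest import mock


class ToyInterceptor:
    """Hypothetical stand-in for a generated REST interceptor."""

    def pre_update_pipeline(self, request, metadata):
        # Runs before the HTTP request; may rewrite request/metadata.
        return request, metadata

    def post_update_pipeline(self, response):
        # Runs after the HTTP response has been deserialized.
        return response

    def post_update_pipeline_with_metadata(self, response, metadata):
        return response, metadata


def toy_update_pipeline(interceptor, request, metadata):
    # Hypothetical transport method: hook ordering mirrors what the tests assert.
    request, metadata = interceptor.pre_update_pipeline(request, metadata)
    response = {"name": "operations/spam"}  # pretend deserialized HTTP result
    response = interceptor.post_update_pipeline(response)
    response, metadata = interceptor.post_update_pipeline_with_metadata(response, metadata)
    return response


def test_toy_interceptor_hooks():
    interceptor = ToyInterceptor()
    with mock.patch.object(interceptor, "pre_update_pipeline",
                           wraps=interceptor.pre_update_pipeline) as pre, \
         mock.patch.object(interceptor, "post_update_pipeline",
                           wraps=interceptor.post_update_pipeline) as post:
        pre.assert_not_called()
        post.assert_not_called()
        toy_update_pipeline(interceptor, request={"name": "p"}, metadata=[("key", "val")])
        pre.assert_called_once()
        post.assert_called_once()
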
@@ -13036,21 +28732,21 @@ def test_delete_channel_rest_bad_request(request_type=eventarc.DeleteChannelRequ response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.delete_channel(request) + client.delete_pipeline(request) @pytest.mark.parametrize("request_type", [ - eventarc.DeleteChannelRequest, + eventarc.DeletePipelineRequest, dict, ]) -def test_delete_channel_rest_call_success(request_type): +def test_delete_pipeline_rest_call_success(request_type): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/channels/sample3'} + request_init = {'name': 'projects/sample1/locations/sample2/pipelines/sample3'} request = request_type(**request_init) # Mock the http request call within the method and fake a response. @@ -13065,14 +28761,14 @@ def test_delete_channel_rest_call_success(request_type): response_value.content = json_return_value.encode('UTF-8') req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.delete_channel(request) + response = client.delete_pipeline(request) # Establish that the response is the type that we expect. json_return_value = json_format.MessageToJson(return_value) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_channel_rest_interceptors(null_interceptor): +def test_delete_pipeline_rest_interceptors(null_interceptor): transport = transports.EventarcRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.EventarcRestInterceptor(), @@ -13082,13 +28778,13 @@ def test_delete_channel_rest_interceptors(null_interceptor): with mock.patch.object(type(client.transport._session), "request") as req, \ mock.patch.object(path_template, "transcode") as transcode, \ mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.EventarcRestInterceptor, "post_delete_channel") as post, \ - mock.patch.object(transports.EventarcRestInterceptor, "post_delete_channel_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.EventarcRestInterceptor, "pre_delete_channel") as pre: + mock.patch.object(transports.EventarcRestInterceptor, "post_delete_pipeline") as post, \ + mock.patch.object(transports.EventarcRestInterceptor, "post_delete_pipeline_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.EventarcRestInterceptor, "pre_delete_pipeline") as pre: pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = eventarc.DeleteChannelRequest.pb(eventarc.DeleteChannelRequest()) + pb_message = eventarc.DeletePipelineRequest.pb(eventarc.DeletePipelineRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -13102,7 +28798,7 @@ def test_delete_channel_rest_interceptors(null_interceptor): return_value = json_format.MessageToJson(operations_pb2.Operation()) req.return_value.content = return_value - request = eventarc.DeleteChannelRequest() + request = eventarc.DeletePipelineRequest() metadata =[ ("key", "val"), ("cephalopod", "squid"), @@ -13111,20 +28807,20 @@ def test_delete_channel_rest_interceptors(null_interceptor): post.return_value = operations_pb2.Operation() post_with_metadata.return_value = operations_pb2.Operation(), metadata - 
client.delete_channel(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.delete_pipeline(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) pre.assert_called_once() post.assert_called_once() post_with_metadata.assert_called_once() -def test_get_provider_rest_bad_request(request_type=eventarc.GetProviderRequest): +def test_get_google_api_source_rest_bad_request(request_type=eventarc.GetGoogleApiSourceRequest): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/providers/sample3'} + request_init = {'name': 'projects/sample1/locations/sample2/googleApiSources/sample3'} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -13137,29 +28833,33 @@ def test_get_provider_rest_bad_request(request_type=eventarc.GetProviderRequest) response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_provider(request) + client.get_google_api_source(request) @pytest.mark.parametrize("request_type", [ - eventarc.GetProviderRequest, + eventarc.GetGoogleApiSourceRequest, dict, ]) -def test_get_provider_rest_call_success(request_type): +def test_get_google_api_source_rest_call_success(request_type): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/providers/sample3'} + request_init = {'name': 'projects/sample1/locations/sample2/googleApiSources/sample3'} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), 'request') as req: # Designate an appropriate value for the returned response. - return_value = discovery.Provider( + return_value = google_api_source.GoogleApiSource( name='name_value', + uid='uid_value', + etag='etag_value', display_name='display_name_value', + destination='destination_value', + crypto_key_name='crypto_key_name_value', ) # Wrap the value into a proper Response obj @@ -13167,21 +28867,25 @@ def test_get_provider_rest_call_success(request_type): response_value.status_code = 200 # Convert return value to protobuf type - return_value = discovery.Provider.pb(return_value) + return_value = google_api_source.GoogleApiSource.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode('UTF-8') req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_provider(request) + response = client.get_google_api_source(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, discovery.Provider) + assert isinstance(response, google_api_source.GoogleApiSource) assert response.name == 'name_value' + assert response.uid == 'uid_value' + assert response.etag == 'etag_value' assert response.display_name == 'display_name_value' + assert response.destination == 'destination_value' + assert response.crypto_key_name == 'crypto_key_name_value' @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_provider_rest_interceptors(null_interceptor): +def test_get_google_api_source_rest_interceptors(null_interceptor): transport = transports.EventarcRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.EventarcRestInterceptor(), @@ -13190,13 +28894,13 @@ def test_get_provider_rest_interceptors(null_interceptor): with mock.patch.object(type(client.transport._session), "request") as req, \ mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.EventarcRestInterceptor, "post_get_provider") as post, \ - mock.patch.object(transports.EventarcRestInterceptor, "post_get_provider_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.EventarcRestInterceptor, "pre_get_provider") as pre: + mock.patch.object(transports.EventarcRestInterceptor, "post_get_google_api_source") as post, \ + mock.patch.object(transports.EventarcRestInterceptor, "post_get_google_api_source_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.EventarcRestInterceptor, "pre_get_google_api_source") as pre: pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = eventarc.GetProviderRequest.pb(eventarc.GetProviderRequest()) + pb_message = eventarc.GetGoogleApiSourceRequest.pb(eventarc.GetGoogleApiSourceRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -13207,26 +28911,26 @@ def test_get_provider_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = discovery.Provider.to_json(discovery.Provider()) + return_value = google_api_source.GoogleApiSource.to_json(google_api_source.GoogleApiSource()) req.return_value.content = return_value - request = eventarc.GetProviderRequest() + request = eventarc.GetGoogleApiSourceRequest() metadata =[ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = discovery.Provider() - post_with_metadata.return_value = discovery.Provider(), metadata + post.return_value = google_api_source.GoogleApiSource() + post_with_metadata.return_value = google_api_source.GoogleApiSource(), metadata - client.get_provider(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.get_google_api_source(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) pre.assert_called_once() post.assert_called_once() post_with_metadata.assert_called_once() -def test_list_providers_rest_bad_request(request_type=eventarc.ListProvidersRequest): +def test_list_google_api_sources_rest_bad_request(request_type=eventarc.ListGoogleApiSourcesRequest): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" @@ -13245,14 +28949,14 @@ def test_list_providers_rest_bad_request(request_type=eventarc.ListProvidersRequ response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", 
"header-2": "value-2"} - client.list_providers(request) + client.list_google_api_sources(request) @pytest.mark.parametrize("request_type", [ - eventarc.ListProvidersRequest, + eventarc.ListGoogleApiSourcesRequest, dict, ]) -def test_list_providers_rest_call_success(request_type): +def test_list_google_api_sources_rest_call_success(request_type): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" @@ -13265,7 +28969,7 @@ def test_list_providers_rest_call_success(request_type): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), 'request') as req: # Designate an appropriate value for the returned response. - return_value = eventarc.ListProvidersResponse( + return_value = eventarc.ListGoogleApiSourcesResponse( next_page_token='next_page_token_value', unreachable=['unreachable_value'], ) @@ -13275,21 +28979,21 @@ def test_list_providers_rest_call_success(request_type): response_value.status_code = 200 # Convert return value to protobuf type - return_value = eventarc.ListProvidersResponse.pb(return_value) + return_value = eventarc.ListGoogleApiSourcesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode('UTF-8') req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_providers(request) + response = client.list_google_api_sources(request) # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListProvidersPager) + assert isinstance(response, pagers.ListGoogleApiSourcesPager) assert response.next_page_token == 'next_page_token_value' assert response.unreachable == ['unreachable_value'] @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_providers_rest_interceptors(null_interceptor): +def test_list_google_api_sources_rest_interceptors(null_interceptor): transport = transports.EventarcRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.EventarcRestInterceptor(), @@ -13298,13 +29002,13 @@ def test_list_providers_rest_interceptors(null_interceptor): with mock.patch.object(type(client.transport._session), "request") as req, \ mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.EventarcRestInterceptor, "post_list_providers") as post, \ - mock.patch.object(transports.EventarcRestInterceptor, "post_list_providers_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.EventarcRestInterceptor, "pre_list_providers") as pre: + mock.patch.object(transports.EventarcRestInterceptor, "post_list_google_api_sources") as post, \ + mock.patch.object(transports.EventarcRestInterceptor, "post_list_google_api_sources_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.EventarcRestInterceptor, "pre_list_google_api_sources") as pre: pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = eventarc.ListProvidersRequest.pb(eventarc.ListProvidersRequest()) + pb_message = eventarc.ListGoogleApiSourcesRequest.pb(eventarc.ListGoogleApiSourcesRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -13315,32 +29019,32 @@ def test_list_providers_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": 
"value-1", "header-2": "value-2"} - return_value = eventarc.ListProvidersResponse.to_json(eventarc.ListProvidersResponse()) + return_value = eventarc.ListGoogleApiSourcesResponse.to_json(eventarc.ListGoogleApiSourcesResponse()) req.return_value.content = return_value - request = eventarc.ListProvidersRequest() + request = eventarc.ListGoogleApiSourcesRequest() metadata =[ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = eventarc.ListProvidersResponse() - post_with_metadata.return_value = eventarc.ListProvidersResponse(), metadata + post.return_value = eventarc.ListGoogleApiSourcesResponse() + post_with_metadata.return_value = eventarc.ListGoogleApiSourcesResponse(), metadata - client.list_providers(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.list_google_api_sources(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) pre.assert_called_once() post.assert_called_once() post_with_metadata.assert_called_once() -def test_get_channel_connection_rest_bad_request(request_type=eventarc.GetChannelConnectionRequest): +def test_create_google_api_source_rest_bad_request(request_type=eventarc.CreateGoogleApiSourceRequest): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/channelConnections/sample3'} + request_init = {'parent': 'projects/sample1/locations/sample2'} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -13353,163 +29057,107 @@ def test_get_channel_connection_rest_bad_request(request_type=eventarc.GetChanne response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_channel_connection(request) + client.create_google_api_source(request) @pytest.mark.parametrize("request_type", [ - eventarc.GetChannelConnectionRequest, + eventarc.CreateGoogleApiSourceRequest, dict, ]) -def test_get_channel_connection_rest_call_success(request_type): +def test_create_google_api_source_rest_call_success(request_type): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/channelConnections/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = channel_connection.ChannelConnection( - name='name_value', - uid='uid_value', - channel='channel_value', - activation_token='activation_token_value', - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = channel_connection.ChannelConnection.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_channel_connection(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, channel_connection.ChannelConnection) - assert response.name == 'name_value' - assert response.uid == 'uid_value' - assert response.channel == 'channel_value' - assert response.activation_token == 'activation_token_value' - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_channel_connection_rest_interceptors(null_interceptor): - transport = transports.EventarcRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.EventarcRestInterceptor(), - ) - client = EventarcClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.EventarcRestInterceptor, "post_get_channel_connection") as post, \ - mock.patch.object(transports.EventarcRestInterceptor, "post_get_channel_connection_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.EventarcRestInterceptor, "pre_get_channel_connection") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = eventarc.GetChannelConnectionRequest.pb(eventarc.GetChannelConnectionRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = channel_connection.ChannelConnection.to_json(channel_connection.ChannelConnection()) - req.return_value.content = return_value + request_init = {'parent': 'projects/sample1/locations/sample2'} + request_init["google_api_source"] = {'name': 'name_value', 'uid': 'uid_value', 'etag': 'etag_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'labels': {}, 'annotations': {}, 'display_name': 'display_name_value', 'destination': 'destination_value', 'crypto_key_name': 'crypto_key_name_value', 'logging_config': {'log_severity': 1}} + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 - request = eventarc.GetChannelConnectionRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = channel_connection.ChannelConnection() - post_with_metadata.return_value = channel_connection.ChannelConnection(), metadata + # Determine if the message type is proto-plus or protobuf + test_field = eventarc.CreateGoogleApiSourceRequest.meta.fields["google_api_source"] - client.get_channel_connection(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields -def test_list_channel_connections_rest_bad_request(request_type=eventarc.ListChannelConnectionsRequest): - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_channel_connections(request) + subfields_not_in_runtime = [] + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["google_api_source"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value -@pytest.mark.parametrize("request_type", [ - eventarc.ListChannelConnectionsRequest, - dict, -]) -def test_list_channel_connections_rest_call_success(request_type): - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + {"field": field, "subfield": subfield, "is_repeated": is_repeated} + ) - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["google_api_source"][field])): + del request_init["google_api_source"][field][i][subfield] + else: + del request_init["google_api_source"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
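
# Illustrative sketch of the response mocking used by the *_call_success tests for
# long-running methods: build an operations_pb2.Operation, serialize it to JSON, and
# return it from a patched requests.Session so the REST transport sees it as the HTTP
# body.  A minimal standalone version (assumes googleapis-common-protos, protobuf, and
# requests are installed):
from unittest import mock

from google.longrunning import operations_pb2
from google.protobuf import json_format
from requests import Session


def test_fake_operation_response_body():
    return_value = operations_pb2.Operation(name="operations/spam")
    with mock.patch.object(Session, "request") as req:
        response_value = mock.Mock()
        response_value.status_code = 200
        response_value.headers = {"header-1": "value-1"}
        response_value.content = json_format.MessageToJson(return_value).encode("UTF-8")
        req.return_value = response_value

        # Anything issuing a request through Session now receives the fake body.
        body = Session().request("POST", "https://example.invalid").content
        parsed = json_format.Parse(body.decode("UTF-8"), operations_pb2.Operation())
        assert parsed.name == "operations/spam"
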
with mock.patch.object(type(client.transport._session), 'request') as req: # Designate an appropriate value for the returned response. - return_value = eventarc.ListChannelConnectionsResponse( - next_page_token='next_page_token_value', - unreachable=['unreachable_value'], - ) + return_value = operations_pb2.Operation(name='operations/spam') # Wrap the value into a proper Response obj response_value = mock.Mock() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = eventarc.ListChannelConnectionsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode('UTF-8') req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_channel_connections(request) + response = client.create_google_api_source(request) # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListChannelConnectionsPager) - assert response.next_page_token == 'next_page_token_value' - assert response.unreachable == ['unreachable_value'] + json_return_value = json_format.MessageToJson(return_value) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_channel_connections_rest_interceptors(null_interceptor): +def test_create_google_api_source_rest_interceptors(null_interceptor): transport = transports.EventarcRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.EventarcRestInterceptor(), @@ -13518,13 +29166,14 @@ def test_list_channel_connections_rest_interceptors(null_interceptor): with mock.patch.object(type(client.transport._session), "request") as req, \ mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.EventarcRestInterceptor, "post_list_channel_connections") as post, \ - mock.patch.object(transports.EventarcRestInterceptor, "post_list_channel_connections_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.EventarcRestInterceptor, "pre_list_channel_connections") as pre: + mock.patch.object(operation.Operation, "_set_result_from_operation"), \ + mock.patch.object(transports.EventarcRestInterceptor, "post_create_google_api_source") as post, \ + mock.patch.object(transports.EventarcRestInterceptor, "post_create_google_api_source_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.EventarcRestInterceptor, "pre_create_google_api_source") as pre: pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = eventarc.ListChannelConnectionsRequest.pb(eventarc.ListChannelConnectionsRequest()) + pb_message = eventarc.CreateGoogleApiSourceRequest.pb(eventarc.CreateGoogleApiSourceRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -13535,32 +29184,32 @@ def test_list_channel_connections_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = eventarc.ListChannelConnectionsResponse.to_json(eventarc.ListChannelConnectionsResponse()) + return_value = json_format.MessageToJson(operations_pb2.Operation()) req.return_value.content = return_value - request = eventarc.ListChannelConnectionsRequest() + request = eventarc.CreateGoogleApiSourceRequest() metadata =[ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - 
post.return_value = eventarc.ListChannelConnectionsResponse() - post_with_metadata.return_value = eventarc.ListChannelConnectionsResponse(), metadata + post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata - client.list_channel_connections(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.create_google_api_source(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) pre.assert_called_once() post.assert_called_once() post_with_metadata.assert_called_once() -def test_create_channel_connection_rest_bad_request(request_type=eventarc.CreateChannelConnectionRequest): +def test_update_google_api_source_rest_bad_request(request_type=eventarc.UpdateGoogleApiSourceRequest): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} + request_init = {'google_api_source': {'name': 'projects/sample1/locations/sample2/googleApiSources/sample3'}} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -13573,28 +29222,28 @@ def test_create_channel_connection_rest_bad_request(request_type=eventarc.Create response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.create_channel_connection(request) + client.update_google_api_source(request) @pytest.mark.parametrize("request_type", [ - eventarc.CreateChannelConnectionRequest, + eventarc.UpdateGoogleApiSourceRequest, dict, ]) -def test_create_channel_connection_rest_call_success(request_type): +def test_update_google_api_source_rest_call_success(request_type): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request_init["channel_connection"] = {'name': 'name_value', 'uid': 'uid_value', 'channel': 'channel_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'activation_token': 'activation_token_value'} + request_init = {'google_api_source': {'name': 'projects/sample1/locations/sample2/googleApiSources/sample3'}} + request_init["google_api_source"] = {'name': 'projects/sample1/locations/sample2/googleApiSources/sample3', 'uid': 'uid_value', 'etag': 'etag_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'labels': {}, 'annotations': {}, 'display_name': 'display_name_value', 'destination': 'destination_value', 'crypto_key_name': 'crypto_key_name_value', 'logging_config': {'log_severity': 1}} # The version of a generated dependency at test runtime may differ from the version used during generation. 
# Delete any fields which are not present in the current runtime dependency # See https://github.com/googleapis/gapic-generator-python/issues/1748 # Determine if the message type is proto-plus or protobuf - test_field = eventarc.CreateChannelConnectionRequest.meta.fields["channel_connection"] + test_field = eventarc.UpdateGoogleApiSourceRequest.meta.fields["google_api_source"] def get_message_fields(field): # Given a field which is a message (composite type), return a list with @@ -13622,7 +29271,7 @@ def get_message_fields(field): # For each item in the sample request, create a list of sub fields which are not present at runtime # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["channel_connection"].items(): # pragma: NO COVER + for field, value in request_init["google_api_source"].items(): # pragma: NO COVER result = None is_repeated = False # For repeated fields @@ -13648,10 +29297,10 @@ def get_message_fields(field): subfield = subfield_to_delete.get("subfield") if subfield: if field_repeated: - for i in range(0, len(request_init["channel_connection"][field])): - del request_init["channel_connection"][field][i][subfield] + for i in range(0, len(request_init["google_api_source"][field])): + del request_init["google_api_source"][field][i][subfield] else: - del request_init["channel_connection"][field][subfield] + del request_init["google_api_source"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. @@ -13666,14 +29315,14 @@ def get_message_fields(field): response_value.content = json_return_value.encode('UTF-8') req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.create_channel_connection(request) + response = client.update_google_api_source(request) # Establish that the response is the type that we expect. 
json_return_value = json_format.MessageToJson(return_value) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_channel_connection_rest_interceptors(null_interceptor): +def test_update_google_api_source_rest_interceptors(null_interceptor): transport = transports.EventarcRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.EventarcRestInterceptor(), @@ -13683,13 +29332,13 @@ def test_create_channel_connection_rest_interceptors(null_interceptor): with mock.patch.object(type(client.transport._session), "request") as req, \ mock.patch.object(path_template, "transcode") as transcode, \ mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.EventarcRestInterceptor, "post_create_channel_connection") as post, \ - mock.patch.object(transports.EventarcRestInterceptor, "post_create_channel_connection_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.EventarcRestInterceptor, "pre_create_channel_connection") as pre: + mock.patch.object(transports.EventarcRestInterceptor, "post_update_google_api_source") as post, \ + mock.patch.object(transports.EventarcRestInterceptor, "post_update_google_api_source_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.EventarcRestInterceptor, "pre_update_google_api_source") as pre: pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = eventarc.CreateChannelConnectionRequest.pb(eventarc.CreateChannelConnectionRequest()) + pb_message = eventarc.UpdateGoogleApiSourceRequest.pb(eventarc.UpdateGoogleApiSourceRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -13703,7 +29352,7 @@ def test_create_channel_connection_rest_interceptors(null_interceptor): return_value = json_format.MessageToJson(operations_pb2.Operation()) req.return_value.content = return_value - request = eventarc.CreateChannelConnectionRequest() + request = eventarc.UpdateGoogleApiSourceRequest() metadata =[ ("key", "val"), ("cephalopod", "squid"), @@ -13712,20 +29361,20 @@ def test_create_channel_connection_rest_interceptors(null_interceptor): post.return_value = operations_pb2.Operation() post_with_metadata.return_value = operations_pb2.Operation(), metadata - client.create_channel_connection(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.update_google_api_source(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) pre.assert_called_once() post.assert_called_once() post_with_metadata.assert_called_once() -def test_delete_channel_connection_rest_bad_request(request_type=eventarc.DeleteChannelConnectionRequest): +def test_delete_google_api_source_rest_bad_request(request_type=eventarc.DeleteGoogleApiSourceRequest): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/channelConnections/sample3'} + request_init = {'name': 'projects/sample1/locations/sample2/googleApiSources/sample3'} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
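
# Illustrative sketch of the proto-plus / protobuf check used by the field-pruning logic
# in the *_call_success tests above: proto-plus message classes expose their fields via
# `.meta.fields` and have no DESCRIPTOR attribute, while raw *_pb2 classes expose
# `.DESCRIPTOR.fields`.  Assumes a google-cloud-eventarc build that already ships the
# new GoogleApiSource type.
from google.cloud.eventarc_v1 import types as eventarc_types
from google.protobuf import timestamp_pb2


def field_names(message_cls):
    # Raw protobuf classes carry metadata on DESCRIPTOR; proto-plus classes carry it
    # on `.meta`.
    if hasattr(message_cls, "DESCRIPTOR"):
        return [f.name for f in message_cls.DESCRIPTOR.fields]
    return list(message_cls.meta.fields.keys())


print(field_names(eventarc_types.GoogleApiSource))  # proto-plus: ['name', 'uid', 'etag', ...]
print(field_names(timestamp_pb2.Timestamp))         # protobuf: ['seconds', 'nanos']
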
@@ -13738,21 +29387,21 @@ def test_delete_channel_connection_rest_bad_request(request_type=eventarc.Delete response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.delete_channel_connection(request) + client.delete_google_api_source(request) @pytest.mark.parametrize("request_type", [ - eventarc.DeleteChannelConnectionRequest, + eventarc.DeleteGoogleApiSourceRequest, dict, ]) -def test_delete_channel_connection_rest_call_success(request_type): +def test_delete_google_api_source_rest_call_success(request_type): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/channelConnections/sample3'} + request_init = {'name': 'projects/sample1/locations/sample2/googleApiSources/sample3'} request = request_type(**request_init) # Mock the http request call within the method and fake a response. @@ -13767,14 +29416,14 @@ def test_delete_channel_connection_rest_call_success(request_type): response_value.content = json_return_value.encode('UTF-8') req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.delete_channel_connection(request) + response = client.delete_google_api_source(request) # Establish that the response is the type that we expect. json_return_value = json_format.MessageToJson(return_value) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_channel_connection_rest_interceptors(null_interceptor): +def test_delete_google_api_source_rest_interceptors(null_interceptor): transport = transports.EventarcRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.EventarcRestInterceptor(), @@ -13784,13 +29433,13 @@ def test_delete_channel_connection_rest_interceptors(null_interceptor): with mock.patch.object(type(client.transport._session), "request") as req, \ mock.patch.object(path_template, "transcode") as transcode, \ mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.EventarcRestInterceptor, "post_delete_channel_connection") as post, \ - mock.patch.object(transports.EventarcRestInterceptor, "post_delete_channel_connection_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.EventarcRestInterceptor, "pre_delete_channel_connection") as pre: + mock.patch.object(transports.EventarcRestInterceptor, "post_delete_google_api_source") as post, \ + mock.patch.object(transports.EventarcRestInterceptor, "post_delete_google_api_source_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.EventarcRestInterceptor, "pre_delete_google_api_source") as pre: pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = eventarc.DeleteChannelConnectionRequest.pb(eventarc.DeleteChannelConnectionRequest()) + pb_message = eventarc.DeleteGoogleApiSourceRequest.pb(eventarc.DeleteGoogleApiSourceRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -13804,7 +29453,7 @@ def test_delete_channel_connection_rest_interceptors(null_interceptor): return_value = json_format.MessageToJson(operations_pb2.Operation()) req.return_value.content = return_value - request = eventarc.DeleteChannelConnectionRequest() + request = eventarc.DeleteGoogleApiSourceRequest() metadata =[ ("key", 
"val"), ("cephalopod", "squid"), @@ -13813,300 +29462,285 @@ def test_delete_channel_connection_rest_interceptors(null_interceptor): post.return_value = operations_pb2.Operation() post_with_metadata.return_value = operations_pb2.Operation(), metadata - client.delete_channel_connection(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.delete_google_api_source(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) pre.assert_called_once() post.assert_called_once() post_with_metadata.assert_called_once() -def test_get_google_channel_config_rest_bad_request(request_type=eventarc.GetGoogleChannelConfigRequest): +def test_get_location_rest_bad_request(request_type=locations_pb2.GetLocationRequest): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + transport="rest", ) - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/googleChannelConfig'} - request = request_type(**request_init) + request = request_type() + request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2'}, request) # Mock the http request call within the method and fake a BadRequest error. with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): # Wrap the value into a proper Response obj - response_value = mock.Mock() + response_value = Response() json_return_value = '' response_value.json = mock.Mock(return_value={}) response_value.status_code = 400 - response_value.request = mock.Mock() + response_value.request = Request() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_google_channel_config(request) + client.get_location(request) @pytest.mark.parametrize("request_type", [ - eventarc.GetGoogleChannelConfigRequest, - dict, + locations_pb2.GetLocationRequest, + dict, ]) -def test_get_google_channel_config_rest_call_success(request_type): +def test_get_location_rest(request_type): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + transport="rest", ) - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/googleChannelConfig'} + request_init = {'name': 'projects/sample1/locations/sample2'} request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(Session, 'request') as req: # Designate an appropriate value for the returned response. - return_value = google_channel_config.GoogleChannelConfig( - name='name_value', - crypto_key_name='crypto_key_name_value', - ) + return_value = locations_pb2.Location() # Wrap the value into a proper Response obj response_value = mock.Mock() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = google_channel_config.GoogleChannelConfig.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_google_channel_config(request) + + response = client.get_location(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, google_channel_config.GoogleChannelConfig) - assert response.name == 'name_value' - assert response.crypto_key_name == 'crypto_key_name_value' + assert isinstance(response, locations_pb2.Location) -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_google_channel_config_rest_interceptors(null_interceptor): - transport = transports.EventarcRestTransport( +def test_list_locations_rest_bad_request(request_type=locations_pb2.ListLocationsRequest): + client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.EventarcRestInterceptor(), - ) - client = EventarcClient(transport=transport) + transport="rest", + ) + request = request_type() + request = json_format.ParseDict({'name': 'projects/sample1'}, request) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.EventarcRestInterceptor, "post_get_google_channel_config") as post, \ - mock.patch.object(transports.EventarcRestInterceptor, "post_get_google_channel_config_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.EventarcRestInterceptor, "pre_get_google_channel_config") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = eventarc.GetGoogleChannelConfigRequest.pb(eventarc.GetGoogleChannelConfigRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_locations(request) - req.return_value = mock.Mock() - req.return_value.status_code = 200 + +@pytest.mark.parametrize("request_type", [ + locations_pb2.ListLocationsRequest, + dict, +]) +def test_list_locations_rest(request_type): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {'name': 'projects/sample1'} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = locations_pb2.ListLocationsResponse() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + + req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = google_channel_config.GoogleChannelConfig.to_json(google_channel_config.GoogleChannelConfig()) - req.return_value.content = return_value - request = eventarc.GetGoogleChannelConfigRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = google_channel_config.GoogleChannelConfig() - post_with_metadata.return_value = google_channel_config.GoogleChannelConfig(), metadata + response = client.list_locations(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.ListLocationsResponse) + + +def test_get_iam_policy_rest_bad_request(request_type=iam_policy_pb2.GetIamPolicyRequest): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict({'resource': 'projects/sample1/locations/sample2/triggers/sample3'}, request) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_iam_policy(request) + + +@pytest.mark.parametrize("request_type", [ + iam_policy_pb2.GetIamPolicyRequest, + dict, +]) +def test_get_iam_policy_rest(request_type): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {'resource': 'projects/sample1/locations/sample2/triggers/sample3'} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # Designate an appropriate value for the returned response. + return_value = policy_pb2.Policy() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_google_channel_config(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + response = client.get_iam_policy(request) - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() + # Establish that the response is the type that we expect. 
+ assert isinstance(response, policy_pb2.Policy) -def test_update_google_channel_config_rest_bad_request(request_type=eventarc.UpdateGoogleChannelConfigRequest): +def test_set_iam_policy_rest_bad_request(request_type=iam_policy_pb2.SetIamPolicyRequest): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + transport="rest", ) - # send a request that will satisfy transcoding - request_init = {'google_channel_config': {'name': 'projects/sample1/locations/sample2/googleChannelConfig'}} - request = request_type(**request_init) + request = request_type() + request = json_format.ParseDict({'resource': 'projects/sample1/locations/sample2/triggers/sample3'}, request) # Mock the http request call within the method and fake a BadRequest error. with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): # Wrap the value into a proper Response obj - response_value = mock.Mock() + response_value = Response() json_return_value = '' response_value.json = mock.Mock(return_value={}) response_value.status_code = 400 - response_value.request = mock.Mock() + response_value.request = Request() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.update_google_channel_config(request) + client.set_iam_policy(request) @pytest.mark.parametrize("request_type", [ - eventarc.UpdateGoogleChannelConfigRequest, - dict, + iam_policy_pb2.SetIamPolicyRequest, + dict, ]) -def test_update_google_channel_config_rest_call_success(request_type): +def test_set_iam_policy_rest(request_type): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + transport="rest", ) - # send a request that will satisfy transcoding - request_init = {'google_channel_config': {'name': 'projects/sample1/locations/sample2/googleChannelConfig'}} - request_init["google_channel_config"] = {'name': 'projects/sample1/locations/sample2/googleChannelConfig', 'update_time': {'seconds': 751, 'nanos': 543}, 'crypto_key_name': 'crypto_key_name_value'} - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 + request_init = {'resource': 'projects/sample1/locations/sample2/triggers/sample3'} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # Designate an appropriate value for the returned response. + return_value = policy_pb2.Policy() - # Determine if the message type is proto-plus or protobuf - test_field = eventarc.UpdateGoogleChannelConfigRequest.meta.fields["google_channel_config"] + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + response = client.set_iam_policy(request) - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - subfields_not_in_runtime = [] +def test_test_iam_permissions_rest_bad_request(request_type=iam_policy_pb2.TestIamPermissionsRequest): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict({'resource': 'projects/sample1/locations/sample2/triggers/sample3'}, request) - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["google_channel_config"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.test_iam_permissions(request) - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - {"field": field, "subfield": subfield, "is_repeated": is_repeated} - ) - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["google_channel_config"][field])): - del request_init["google_channel_config"][field][i][subfield] - else: - del request_init["google_channel_config"][field][subfield] - request = request_type(**request_init) +@pytest.mark.parametrize("request_type", [ + iam_policy_pb2.TestIamPermissionsRequest, + dict, +]) +def test_test_iam_permissions_rest(request_type): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {'resource': 'projects/sample1/locations/sample2/triggers/sample3'} + request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(Session, 'request') as req: # Designate an appropriate value for the returned response. - return_value = gce_google_channel_config.GoogleChannelConfig( - name='name_value', - crypto_key_name='crypto_key_name_value', - ) + return_value = iam_policy_pb2.TestIamPermissionsResponse() # Wrap the value into a proper Response obj response_value = mock.Mock() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = gce_google_channel_config.GoogleChannelConfig.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.update_google_channel_config(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, gce_google_channel_config.GoogleChannelConfig) - assert response.name == 'name_value' - assert response.crypto_key_name == 'crypto_key_name_value' - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_google_channel_config_rest_interceptors(null_interceptor): - transport = transports.EventarcRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.EventarcRestInterceptor(), - ) - client = EventarcClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.EventarcRestInterceptor, "post_update_google_channel_config") as post, \ - mock.patch.object(transports.EventarcRestInterceptor, "post_update_google_channel_config_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.EventarcRestInterceptor, "pre_update_google_channel_config") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = eventarc.UpdateGoogleChannelConfigRequest.pb(eventarc.UpdateGoogleChannelConfigRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - req.return_value = mock.Mock() - req.return_value.status_code = 200 + req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = gce_google_channel_config.GoogleChannelConfig.to_json(gce_google_channel_config.GoogleChannelConfig()) - req.return_value.content = return_value - request = eventarc.UpdateGoogleChannelConfigRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = gce_google_channel_config.GoogleChannelConfig() - post_with_metadata.return_value = gce_google_channel_config.GoogleChannelConfig(), metadata - - client.update_google_channel_config(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + response = client.test_iam_permissions(request) - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() + # Establish that the response is the type that we expect. + assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) -def test_get_location_rest_bad_request(request_type=locations_pb2.GetLocationRequest): +def test_cancel_operation_rest_bad_request(request_type=operations_pb2.CancelOperationRequest): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2'}, request) + request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2/operations/sample3'}, request) # Mock the http request call within the method and fake a BadRequest error. 
with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): @@ -14118,48 +29752,48 @@ def test_get_location_rest_bad_request(request_type=locations_pb2.GetLocationReq response_value.request = Request() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_location(request) + client.cancel_operation(request) @pytest.mark.parametrize("request_type", [ - locations_pb2.GetLocationRequest, + operations_pb2.CancelOperationRequest, dict, ]) -def test_get_location_rest(request_type): +def test_cancel_operation_rest(request_type): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) - request_init = {'name': 'projects/sample1/locations/sample2'} + request_init = {'name': 'projects/sample1/locations/sample2/operations/sample3'} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, 'request') as req: # Designate an appropriate value for the returned response. - return_value = locations_pb2.Location() + return_value = None # Wrap the value into a proper Response obj response_value = mock.Mock() response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) + json_return_value = '{}' response_value.content = json_return_value.encode('UTF-8') req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_location(request) + response = client.cancel_operation(request) # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.Location) + assert response is None -def test_list_locations_rest_bad_request(request_type=locations_pb2.ListLocationsRequest): +def test_delete_operation_rest_bad_request(request_type=operations_pb2.DeleteOperationRequest): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1'}, request) + request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2/operations/sample3'}, request) # Mock the http request call within the method and fake a BadRequest error. with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): @@ -14171,48 +29805,48 @@ def test_list_locations_rest_bad_request(request_type=locations_pb2.ListLocation response_value.request = Request() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_locations(request) + client.delete_operation(request) @pytest.mark.parametrize("request_type", [ - locations_pb2.ListLocationsRequest, + operations_pb2.DeleteOperationRequest, dict, ]) -def test_list_locations_rest(request_type): +def test_delete_operation_rest(request_type): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) - request_init = {'name': 'projects/sample1'} + request_init = {'name': 'projects/sample1/locations/sample2/operations/sample3'} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, 'request') as req: # Designate an appropriate value for the returned response. 
- return_value = locations_pb2.ListLocationsResponse() + return_value = None # Wrap the value into a proper Response obj response_value = mock.Mock() response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) + json_return_value = '{}' response_value.content = json_return_value.encode('UTF-8') req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_locations(request) + response = client.delete_operation(request) # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.ListLocationsResponse) + assert response is None -def test_get_iam_policy_rest_bad_request(request_type=iam_policy_pb2.GetIamPolicyRequest): +def test_get_operation_rest_bad_request(request_type=operations_pb2.GetOperationRequest): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) request = request_type() - request = json_format.ParseDict({'resource': 'projects/sample1/locations/sample2/triggers/sample3'}, request) + request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2/operations/sample3'}, request) # Mock the http request call within the method and fake a BadRequest error. with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): @@ -14224,25 +29858,25 @@ def test_get_iam_policy_rest_bad_request(request_type=iam_policy_pb2.GetIamPolic response_value.request = Request() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_iam_policy(request) + client.get_operation(request) @pytest.mark.parametrize("request_type", [ - iam_policy_pb2.GetIamPolicyRequest, + operations_pb2.GetOperationRequest, dict, ]) -def test_get_iam_policy_rest(request_type): +def test_get_operation_rest(request_type): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) - request_init = {'resource': 'projects/sample1/locations/sample2/triggers/sample3'} + request_init = {'name': 'projects/sample1/locations/sample2/operations/sample3'} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, 'request') as req: # Designate an appropriate value for the returned response. - return_value = policy_pb2.Policy() + return_value = operations_pb2.Operation() # Wrap the value into a proper Response obj response_value = mock.Mock() @@ -14253,19 +29887,19 @@ def test_get_iam_policy_rest(request_type): req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_iam_policy(request) + response = client.get_operation(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, policy_pb2.Policy) + assert isinstance(response, operations_pb2.Operation) -def test_set_iam_policy_rest_bad_request(request_type=iam_policy_pb2.SetIamPolicyRequest): +def test_list_operations_rest_bad_request(request_type=operations_pb2.ListOperationsRequest): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) request = request_type() - request = json_format.ParseDict({'resource': 'projects/sample1/locations/sample2/triggers/sample3'}, request) + request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2'}, request) # Mock the http request call within the method and fake a BadRequest error. with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): @@ -14277,25 +29911,25 @@ def test_set_iam_policy_rest_bad_request(request_type=iam_policy_pb2.SetIamPolic response_value.request = Request() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.set_iam_policy(request) + client.list_operations(request) @pytest.mark.parametrize("request_type", [ - iam_policy_pb2.SetIamPolicyRequest, + operations_pb2.ListOperationsRequest, dict, ]) -def test_set_iam_policy_rest(request_type): +def test_list_operations_rest(request_type): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) - request_init = {'resource': 'projects/sample1/locations/sample2/triggers/sample3'} + request_init = {'name': 'projects/sample1/locations/sample2'} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, 'request') as req: # Designate an appropriate value for the returned response. - return_value = policy_pb2.Policy() + return_value = operations_pb2.ListOperationsResponse() # Wrap the value into a proper Response obj response_value = mock.Mock() @@ -14306,287 +29940,484 @@ def test_set_iam_policy_rest(request_type): req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.set_iam_policy(request) + response = client.list_operations(request) # Establish that the response is the type that we expect. - assert isinstance(response, policy_pb2.Policy) + assert isinstance(response, operations_pb2.ListOperationsResponse) + +def test_initialize_client_w_rest(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + assert client is not None -def test_test_iam_permissions_rest_bad_request(request_type=iam_policy_pb2.TestIamPermissionsRequest): +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_trigger_empty_call_rest(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_trigger), + '__call__') as call: + client.get_trigger(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = eventarc.GetTriggerRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+def test_list_triggers_empty_call_rest(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_triggers), + '__call__') as call: + client.list_triggers(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = eventarc.ListTriggersRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_trigger_empty_call_rest(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_trigger), + '__call__') as call: + client.create_trigger(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = eventarc.CreateTriggerRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_trigger_empty_call_rest(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_trigger), + '__call__') as call: + client.update_trigger(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = eventarc.UpdateTriggerRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_trigger_empty_call_rest(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_trigger), + '__call__') as call: + client.delete_trigger(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = eventarc.DeleteTriggerRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_channel_empty_call_rest(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_channel), + '__call__') as call: + client.get_channel(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = eventarc.GetChannelRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_channels_empty_call_rest(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_channels), + '__call__') as call: + client.list_channels(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = eventarc.ListChannelsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_channel_empty_call_rest(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_channel_), + '__call__') as call: + client.create_channel(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = eventarc.CreateChannelRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_channel_empty_call_rest(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_channel), + '__call__') as call: + client.update_channel(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = eventarc.UpdateChannelRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_channel_empty_call_rest(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_channel), + '__call__') as call: + client.delete_channel(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = eventarc.DeleteChannelRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_provider_empty_call_rest(): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) - request = request_type() - request = json_format.ParseDict({'resource': 'projects/sample1/locations/sample2/triggers/sample3'}, request) - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.test_iam_permissions(request) + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_provider), + '__call__') as call: + client.get_provider(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = eventarc.GetProviderRequest() + assert args[0] == request_msg -@pytest.mark.parametrize("request_type", [ - iam_policy_pb2.TestIamPermissionsRequest, - dict, -]) -def test_test_iam_permissions_rest(request_type): + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_providers_empty_call_rest(): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) - request_init = {'resource': 'projects/sample1/locations/sample2/triggers/sample3'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # Designate an appropriate value for the returned response. - return_value = iam_policy_pb2.TestIamPermissionsResponse() - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_providers), + '__call__') as call: + client.list_providers(request=None) - response = client.test_iam_permissions(request) + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = eventarc.ListProvidersRequest() - # Establish that the response is the type that we expect. - assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) + assert args[0] == request_msg -def test_cancel_operation_rest_bad_request(request_type=operations_pb2.CancelOperationRequest): +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_channel_connection_empty_call_rest(): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) - request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2/operations/sample3'}, request) - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.cancel_operation(request) + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_channel_connection), + '__call__') as call: + client.get_channel_connection(request=None) + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = eventarc.GetChannelConnectionRequest() -@pytest.mark.parametrize("request_type", [ - operations_pb2.CancelOperationRequest, - dict, -]) -def test_cancel_operation_rest(request_type): + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. 
request == None and no flattened fields passed, work. +def test_list_channel_connections_empty_call_rest(): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) - request_init = {'name': 'projects/sample1/locations/sample2/operations/sample3'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = '{}' - response_value.content = json_return_value.encode('UTF-8') - - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_channel_connections), + '__call__') as call: + client.list_channel_connections(request=None) - response = client.cancel_operation(request) + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = eventarc.ListChannelConnectionsRequest() - # Establish that the response is the type that we expect. - assert response is None + assert args[0] == request_msg -def test_delete_operation_rest_bad_request(request_type=operations_pb2.DeleteOperationRequest): +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_channel_connection_empty_call_rest(): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) - request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2/operations/sample3'}, request) - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.delete_operation(request) + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_channel_connection), + '__call__') as call: + client.create_channel_connection(request=None) + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = eventarc.CreateChannelConnectionRequest() -@pytest.mark.parametrize("request_type", [ - operations_pb2.DeleteOperationRequest, - dict, -]) -def test_delete_operation_rest(request_type): + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_channel_connection_empty_call_rest(): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) - request_init = {'name': 'projects/sample1/locations/sample2/operations/sample3'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(Session, 'request') as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = '{}' - response_value.content = json_return_value.encode('UTF-8') - - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_channel_connection), + '__call__') as call: + client.delete_channel_connection(request=None) - response = client.delete_operation(request) + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = eventarc.DeleteChannelConnectionRequest() - # Establish that the response is the type that we expect. - assert response is None + assert args[0] == request_msg -def test_get_operation_rest_bad_request(request_type=operations_pb2.GetOperationRequest): +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_google_channel_config_empty_call_rest(): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) - request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2/operations/sample3'}, request) - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_operation(request) + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_google_channel_config), + '__call__') as call: + client.get_google_channel_config(request=None) + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = eventarc.GetGoogleChannelConfigRequest() -@pytest.mark.parametrize("request_type", [ - operations_pb2.GetOperationRequest, - dict, -]) -def test_get_operation_rest(request_type): + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_google_channel_config_empty_call_rest(): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) - request_init = {'name': 'projects/sample1/locations/sample2/operations/sample3'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation() - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_google_channel_config), + '__call__') as call: + client.update_google_channel_config(request=None) - response = client.get_operation(request) + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = eventarc.UpdateGoogleChannelConfigRequest() - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.Operation) + assert args[0] == request_msg -def test_list_operations_rest_bad_request(request_type=operations_pb2.ListOperationsRequest): +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_message_bus_empty_call_rest(): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) - request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2'}, request) - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_operations(request) + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_message_bus), + '__call__') as call: + client.get_message_bus(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = eventarc.GetMessageBusRequest() + assert args[0] == request_msg -@pytest.mark.parametrize("request_type", [ - operations_pb2.ListOperationsRequest, - dict, -]) -def test_list_operations_rest(request_type): + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_message_buses_empty_call_rest(): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) - request_init = {'name': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.ListOperationsResponse() - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') + # Mock the actual call, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_message_buses), + '__call__') as call: + client.list_message_buses(request=None) - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = eventarc.ListMessageBusesRequest() - response = client.list_operations(request) + assert args[0] == request_msg - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.ListOperationsResponse) -def test_initialize_client_w_rest(): +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_message_bus_enrollments_empty_call_rest(): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + transport="rest", ) - assert client is not None + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_message_bus_enrollments), + '__call__') as call: + client.list_message_bus_enrollments(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = eventarc.ListMessageBusEnrollmentsRequest() + + assert args[0] == request_msg # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. -def test_get_trigger_empty_call_rest(): +def test_create_message_bus_empty_call_rest(): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -14594,21 +30425,21 @@ def test_get_trigger_empty_call_rest(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport.get_trigger), + type(client.transport.create_message_bus), '__call__') as call: - client.get_trigger(request=None) + client.create_message_bus(request=None) # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - request_msg = eventarc.GetTriggerRequest() + request_msg = eventarc.CreateMessageBusRequest() assert args[0] == request_msg # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. -def test_list_triggers_empty_call_rest(): +def test_update_message_bus_empty_call_rest(): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -14616,21 +30447,21 @@ def test_list_triggers_empty_call_rest(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport.list_triggers), + type(client.transport.update_message_bus), '__call__') as call: - client.list_triggers(request=None) + client.update_message_bus(request=None) # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - request_msg = eventarc.ListTriggersRequest() + request_msg = eventarc.UpdateMessageBusRequest() assert args[0] == request_msg # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
-def test_create_trigger_empty_call_rest(): +def test_delete_message_bus_empty_call_rest(): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -14638,21 +30469,21 @@ def test_create_trigger_empty_call_rest(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport.create_trigger), + type(client.transport.delete_message_bus), '__call__') as call: - client.create_trigger(request=None) + client.delete_message_bus(request=None) # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - request_msg = eventarc.CreateTriggerRequest() + request_msg = eventarc.DeleteMessageBusRequest() assert args[0] == request_msg # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. -def test_update_trigger_empty_call_rest(): +def test_get_enrollment_empty_call_rest(): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -14660,21 +30491,21 @@ def test_update_trigger_empty_call_rest(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport.update_trigger), + type(client.transport.get_enrollment), '__call__') as call: - client.update_trigger(request=None) + client.get_enrollment(request=None) # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - request_msg = eventarc.UpdateTriggerRequest() + request_msg = eventarc.GetEnrollmentRequest() assert args[0] == request_msg # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. -def test_delete_trigger_empty_call_rest(): +def test_list_enrollments_empty_call_rest(): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -14682,21 +30513,21 @@ def test_delete_trigger_empty_call_rest(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport.delete_trigger), + type(client.transport.list_enrollments), '__call__') as call: - client.delete_trigger(request=None) + client.list_enrollments(request=None) # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - request_msg = eventarc.DeleteTriggerRequest() + request_msg = eventarc.ListEnrollmentsRequest() assert args[0] == request_msg # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. -def test_get_channel_empty_call_rest(): +def test_create_enrollment_empty_call_rest(): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -14704,21 +30535,21 @@ def test_get_channel_empty_call_rest(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport.get_channel), + type(client.transport.create_enrollment), '__call__') as call: - client.get_channel(request=None) + client.create_enrollment(request=None) # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - request_msg = eventarc.GetChannelRequest() + request_msg = eventarc.CreateEnrollmentRequest() assert args[0] == request_msg # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
-def test_list_channels_empty_call_rest(): +def test_update_enrollment_empty_call_rest(): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -14726,21 +30557,21 @@ def test_list_channels_empty_call_rest(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport.list_channels), + type(client.transport.update_enrollment), '__call__') as call: - client.list_channels(request=None) + client.update_enrollment(request=None) # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - request_msg = eventarc.ListChannelsRequest() + request_msg = eventarc.UpdateEnrollmentRequest() assert args[0] == request_msg # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. -def test_create_channel_empty_call_rest(): +def test_delete_enrollment_empty_call_rest(): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -14748,21 +30579,21 @@ def test_create_channel_empty_call_rest(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport.create_channel_), + type(client.transport.delete_enrollment), '__call__') as call: - client.create_channel(request=None) + client.delete_enrollment(request=None) # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - request_msg = eventarc.CreateChannelRequest() + request_msg = eventarc.DeleteEnrollmentRequest() assert args[0] == request_msg # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. -def test_update_channel_empty_call_rest(): +def test_get_pipeline_empty_call_rest(): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -14770,21 +30601,21 @@ def test_update_channel_empty_call_rest(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport.update_channel), + type(client.transport.get_pipeline), '__call__') as call: - client.update_channel(request=None) + client.get_pipeline(request=None) # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - request_msg = eventarc.UpdateChannelRequest() + request_msg = eventarc.GetPipelineRequest() assert args[0] == request_msg # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. -def test_delete_channel_empty_call_rest(): +def test_list_pipelines_empty_call_rest(): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -14792,21 +30623,21 @@ def test_delete_channel_empty_call_rest(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport.delete_channel), + type(client.transport.list_pipelines), '__call__') as call: - client.delete_channel(request=None) + client.list_pipelines(request=None) # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - request_msg = eventarc.DeleteChannelRequest() + request_msg = eventarc.ListPipelinesRequest() assert args[0] == request_msg # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
-def test_get_provider_empty_call_rest(): +def test_create_pipeline_empty_call_rest(): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -14814,21 +30645,21 @@ def test_get_provider_empty_call_rest(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport.get_provider), + type(client.transport.create_pipeline), '__call__') as call: - client.get_provider(request=None) + client.create_pipeline(request=None) # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - request_msg = eventarc.GetProviderRequest() + request_msg = eventarc.CreatePipelineRequest() assert args[0] == request_msg # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. -def test_list_providers_empty_call_rest(): +def test_update_pipeline_empty_call_rest(): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -14836,21 +30667,21 @@ def test_list_providers_empty_call_rest(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport.list_providers), + type(client.transport.update_pipeline), '__call__') as call: - client.list_providers(request=None) + client.update_pipeline(request=None) # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - request_msg = eventarc.ListProvidersRequest() + request_msg = eventarc.UpdatePipelineRequest() assert args[0] == request_msg # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. -def test_get_channel_connection_empty_call_rest(): +def test_delete_pipeline_empty_call_rest(): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -14858,21 +30689,21 @@ def test_get_channel_connection_empty_call_rest(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport.get_channel_connection), + type(client.transport.delete_pipeline), '__call__') as call: - client.get_channel_connection(request=None) + client.delete_pipeline(request=None) # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - request_msg = eventarc.GetChannelConnectionRequest() + request_msg = eventarc.DeletePipelineRequest() assert args[0] == request_msg # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. -def test_list_channel_connections_empty_call_rest(): +def test_get_google_api_source_empty_call_rest(): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -14880,21 +30711,21 @@ def test_list_channel_connections_empty_call_rest(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport.list_channel_connections), + type(client.transport.get_google_api_source), '__call__') as call: - client.list_channel_connections(request=None) + client.get_google_api_source(request=None) # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - request_msg = eventarc.ListChannelConnectionsRequest() + request_msg = eventarc.GetGoogleApiSourceRequest() assert args[0] == request_msg # This test is a coverage failsafe to make sure that totally empty calls, # i.e. 
request == None and no flattened fields passed, work. -def test_create_channel_connection_empty_call_rest(): +def test_list_google_api_sources_empty_call_rest(): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -14902,21 +30733,21 @@ def test_create_channel_connection_empty_call_rest(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport.create_channel_connection), + type(client.transport.list_google_api_sources), '__call__') as call: - client.create_channel_connection(request=None) + client.list_google_api_sources(request=None) # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - request_msg = eventarc.CreateChannelConnectionRequest() + request_msg = eventarc.ListGoogleApiSourcesRequest() assert args[0] == request_msg # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. -def test_delete_channel_connection_empty_call_rest(): +def test_create_google_api_source_empty_call_rest(): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -14924,21 +30755,21 @@ def test_delete_channel_connection_empty_call_rest(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport.delete_channel_connection), + type(client.transport.create_google_api_source), '__call__') as call: - client.delete_channel_connection(request=None) + client.create_google_api_source(request=None) # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - request_msg = eventarc.DeleteChannelConnectionRequest() + request_msg = eventarc.CreateGoogleApiSourceRequest() assert args[0] == request_msg # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. -def test_get_google_channel_config_empty_call_rest(): +def test_update_google_api_source_empty_call_rest(): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -14946,21 +30777,21 @@ def test_get_google_channel_config_empty_call_rest(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport.get_google_channel_config), + type(client.transport.update_google_api_source), '__call__') as call: - client.get_google_channel_config(request=None) + client.update_google_api_source(request=None) # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - request_msg = eventarc.GetGoogleChannelConfigRequest() + request_msg = eventarc.UpdateGoogleApiSourceRequest() assert args[0] == request_msg # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. -def test_update_google_channel_config_empty_call_rest(): +def test_delete_google_api_source_empty_call_rest(): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -14968,14 +30799,14 @@ def test_update_google_channel_config_empty_call_rest(): # Mock the actual call, and fake the request. 
with mock.patch.object( - type(client.transport.update_google_channel_config), + type(client.transport.delete_google_api_source), '__call__') as call: - client.update_google_channel_config(request=None) + client.delete_google_api_source(request=None) # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - request_msg = eventarc.UpdateGoogleChannelConfigRequest() + request_msg = eventarc.DeleteGoogleApiSourceRequest() assert args[0] == request_msg @@ -15044,6 +30875,27 @@ def test_eventarc_base_transport(): 'delete_channel_connection', 'get_google_channel_config', 'update_google_channel_config', + 'get_message_bus', + 'list_message_buses', + 'list_message_bus_enrollments', + 'create_message_bus', + 'update_message_bus', + 'delete_message_bus', + 'get_enrollment', + 'list_enrollments', + 'create_enrollment', + 'update_enrollment', + 'delete_enrollment', + 'get_pipeline', + 'list_pipelines', + 'create_pipeline', + 'update_pipeline', + 'delete_pipeline', + 'get_google_api_source', + 'list_google_api_sources', + 'create_google_api_source', + 'update_google_api_source', + 'delete_google_api_source', 'set_iam_policy', 'get_iam_policy', 'test_iam_permissions', @@ -15350,6 +31202,69 @@ def test_eventarc_client_transport_session_collision(transport_name): session1 = client1.transport.update_google_channel_config._session session2 = client2.transport.update_google_channel_config._session assert session1 != session2 + session1 = client1.transport.get_message_bus._session + session2 = client2.transport.get_message_bus._session + assert session1 != session2 + session1 = client1.transport.list_message_buses._session + session2 = client2.transport.list_message_buses._session + assert session1 != session2 + session1 = client1.transport.list_message_bus_enrollments._session + session2 = client2.transport.list_message_bus_enrollments._session + assert session1 != session2 + session1 = client1.transport.create_message_bus._session + session2 = client2.transport.create_message_bus._session + assert session1 != session2 + session1 = client1.transport.update_message_bus._session + session2 = client2.transport.update_message_bus._session + assert session1 != session2 + session1 = client1.transport.delete_message_bus._session + session2 = client2.transport.delete_message_bus._session + assert session1 != session2 + session1 = client1.transport.get_enrollment._session + session2 = client2.transport.get_enrollment._session + assert session1 != session2 + session1 = client1.transport.list_enrollments._session + session2 = client2.transport.list_enrollments._session + assert session1 != session2 + session1 = client1.transport.create_enrollment._session + session2 = client2.transport.create_enrollment._session + assert session1 != session2 + session1 = client1.transport.update_enrollment._session + session2 = client2.transport.update_enrollment._session + assert session1 != session2 + session1 = client1.transport.delete_enrollment._session + session2 = client2.transport.delete_enrollment._session + assert session1 != session2 + session1 = client1.transport.get_pipeline._session + session2 = client2.transport.get_pipeline._session + assert session1 != session2 + session1 = client1.transport.list_pipelines._session + session2 = client2.transport.list_pipelines._session + assert session1 != session2 + session1 = client1.transport.create_pipeline._session + session2 = client2.transport.create_pipeline._session + assert session1 != session2 + session1 = 
client1.transport.update_pipeline._session
+    session2 = client2.transport.update_pipeline._session
+    assert session1 != session2
+    session1 = client1.transport.delete_pipeline._session
+    session2 = client2.transport.delete_pipeline._session
+    assert session1 != session2
+    session1 = client1.transport.get_google_api_source._session
+    session2 = client2.transport.get_google_api_source._session
+    assert session1 != session2
+    session1 = client1.transport.list_google_api_sources._session
+    session2 = client2.transport.list_google_api_sources._session
+    assert session1 != session2
+    session1 = client1.transport.create_google_api_source._session
+    session2 = client2.transport.create_google_api_source._session
+    assert session1 != session2
+    session1 = client1.transport.update_google_api_source._session
+    session2 = client2.transport.update_google_api_source._session
+    assert session1 != session2
+    session1 = client1.transport.delete_google_api_source._session
+    session2 = client2.transport.delete_google_api_source._session
+    assert session1 != session2

 def test_eventarc_grpc_transport_channel():
     channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials())
@@ -15580,6 +31495,48 @@ def test_parse_crypto_key_path():
     actual = EventarcClient.parse_crypto_key_path(path)
     assert expected == actual

+def test_enrollment_path():
+    project = "whelk"
+    location = "octopus"
+    enrollment = "oyster"
+    expected = "projects/{project}/locations/{location}/enrollments/{enrollment}".format(project=project, location=location, enrollment=enrollment, )
+    actual = EventarcClient.enrollment_path(project, location, enrollment)
+    assert expected == actual
+
+
+def test_parse_enrollment_path():
+    expected = {
+        "project": "nudibranch",
+        "location": "cuttlefish",
+        "enrollment": "mussel",
+    }
+    path = EventarcClient.enrollment_path(**expected)
+
+    # Check that the path construction is reversible.
+    actual = EventarcClient.parse_enrollment_path(path)
+    assert expected == actual
+
+def test_google_api_source_path():
+    project = "winkle"
+    location = "nautilus"
+    google_api_source = "scallop"
+    expected = "projects/{project}/locations/{location}/googleApiSources/{google_api_source}".format(project=project, location=location, google_api_source=google_api_source, )
+    actual = EventarcClient.google_api_source_path(project, location, google_api_source)
+    assert expected == actual
+
+
+def test_parse_google_api_source_path():
+    expected = {
+        "project": "abalone",
+        "location": "squid",
+        "google_api_source": "clam",
+    }
+    path = EventarcClient.google_api_source_path(**expected)
+
+    # Check that the path construction is reversible.
+    actual = EventarcClient.parse_google_api_source_path(path)
+    assert expected == actual
+
 def test_google_channel_config_path():
     project = "whelk"
     location = "octopus"
@@ -15599,10 +31556,73 @@ def test_parse_google_channel_config_path():
     actual = EventarcClient.parse_google_channel_config_path(path)
     assert expected == actual

-def test_provider_path():
+def test_message_bus_path():
+    project = "cuttlefish"
+    location = "mussel"
+    message_bus = "winkle"
+    expected = "projects/{project}/locations/{location}/messageBuses/{message_bus}".format(project=project, location=location, message_bus=message_bus, )
+    actual = EventarcClient.message_bus_path(project, location, message_bus)
+    assert expected == actual
+
+
+def test_parse_message_bus_path():
+    expected = {
+        "project": "nautilus",
+        "location": "scallop",
+        "message_bus": "abalone",
+    }
+    path = EventarcClient.message_bus_path(**expected)
+
+    # Check that the path construction is reversible.
+    actual = EventarcClient.parse_message_bus_path(path)
+    assert expected == actual
+
+def test_network_attachment_path():
+    project = "squid"
+    region = "clam"
+    networkattachment = "whelk"
+    expected = "projects/{project}/regions/{region}/networkAttachments/{networkattachment}".format(project=project, region=region, networkattachment=networkattachment, )
+    actual = EventarcClient.network_attachment_path(project, region, networkattachment)
+    assert expected == actual
+
+
+def test_parse_network_attachment_path():
+    expected = {
+        "project": "octopus",
+        "region": "oyster",
+        "networkattachment": "nudibranch",
+    }
+    path = EventarcClient.network_attachment_path(**expected)
+
+    # Check that the path construction is reversible.
+    actual = EventarcClient.parse_network_attachment_path(path)
+    assert expected == actual
+
+def test_pipeline_path():
     project = "cuttlefish"
     location = "mussel"
-    provider = "winkle"
+    pipeline = "winkle"
+    expected = "projects/{project}/locations/{location}/pipelines/{pipeline}".format(project=project, location=location, pipeline=pipeline, )
+    actual = EventarcClient.pipeline_path(project, location, pipeline)
+    assert expected == actual
+
+
+def test_parse_pipeline_path():
+    expected = {
+        "project": "nautilus",
+        "location": "scallop",
+        "pipeline": "abalone",
+    }
+    path = EventarcClient.pipeline_path(**expected)
+
+    # Check that the path construction is reversible.
+    actual = EventarcClient.parse_pipeline_path(path)
+    assert expected == actual
+
+def test_provider_path():
+    project = "squid"
+    location = "clam"
+    provider = "whelk"
     expected = "projects/{project}/locations/{location}/providers/{provider}".format(project=project, location=location, provider=provider, )
     actual = EventarcClient.provider_path(project, location, provider)
     assert expected == actual
@@ -15610,9 +31630,9 @@ def test_provider_path():

 def test_parse_provider_path():
     expected = {
-        "project": "nautilus",
-        "location": "scallop",
-        "provider": "abalone",
+        "project": "octopus",
+        "location": "oyster",
+        "provider": "nudibranch",
     }
     path = EventarcClient.provider_path(**expected)

@@ -15636,8 +31656,8 @@ def test_parse_service_path():
     assert expected == actual

 def test_service_account_path():
-    project = "squid"
-    service_account = "clam"
+    project = "cuttlefish"
+    service_account = "mussel"
     expected = "projects/{project}/serviceAccounts/{service_account}".format(project=project, service_account=service_account, )
     actual = EventarcClient.service_account_path(project, service_account)
     assert expected == actual
@@ -15645,8 +31665,8 @@ def test_service_account_path():

 def test_parse_service_account_path():
     expected = {
-        "project": "whelk",
-        "service_account": "octopus",
+        "project": "winkle",
+        "service_account": "nautilus",
     }
     path = EventarcClient.service_account_path(**expected)

@@ -15654,10 +31674,29 @@ def test_parse_service_account_path():
     actual = EventarcClient.parse_service_account_path(path)
     assert expected == actual

+def test_topic_path():
+    project = "scallop"
+    topic = "abalone"
+    expected = "projects/{project}/topics/{topic}".format(project=project, topic=topic, )
+    actual = EventarcClient.topic_path(project, topic)
+    assert expected == actual
+
+
+def test_parse_topic_path():
+    expected = {
+        "project": "squid",
+        "topic": "clam",
+    }
+    path = EventarcClient.topic_path(**expected)
+
+    # Check that the path construction is reversible.
+    actual = EventarcClient.parse_topic_path(path)
+    assert expected == actual
+
 def test_trigger_path():
-    project = "oyster"
-    location = "nudibranch"
-    trigger = "cuttlefish"
+    project = "whelk"
+    location = "octopus"
+    trigger = "oyster"
     expected = "projects/{project}/locations/{location}/triggers/{trigger}".format(project=project, location=location, trigger=trigger, )
     actual = EventarcClient.trigger_path(project, location, trigger)
     assert expected == actual
@@ -15665,9 +31704,9 @@ def test_trigger_path():

 def test_parse_trigger_path():
     expected = {
-        "project": "mussel",
-        "location": "winkle",
-        "trigger": "nautilus",
+        "project": "nudibranch",
+        "location": "cuttlefish",
+        "trigger": "mussel",
     }
     path = EventarcClient.trigger_path(**expected)

@@ -15676,9 +31715,9 @@ def test_parse_trigger_path():
     assert expected == actual

 def test_workflow_path():
-    project = "scallop"
-    location = "abalone"
-    workflow = "squid"
+    project = "winkle"
+    location = "nautilus"
+    workflow = "scallop"
     expected = "projects/{project}/locations/{location}/workflows/{workflow}".format(project=project, location=location, workflow=workflow, )
     actual = EventarcClient.workflow_path(project, location, workflow)
     assert expected == actual
@@ -15686,9 +31725,9 @@ def test_workflow_path():

 def test_parse_workflow_path():
     expected = {
-        "project": "clam",
-        "location": "whelk",
-        "workflow": "octopus",
+        "project": "abalone",
+        "location": "squid",
+        "workflow": "clam",
     }
     path = EventarcClient.workflow_path(**expected)

@@ -15697,7 +31736,7 @@ def test_parse_workflow_path():
     assert expected == actual

 def test_common_billing_account_path():
-    billing_account = "oyster"
+    billing_account = "whelk"
     expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, )
     actual = EventarcClient.common_billing_account_path(billing_account)
     assert expected == actual
@@ -15705,7 +31744,7 @@ def test_common_billing_account_path():

 def test_parse_common_billing_account_path():
     expected = {
-        "billing_account": "nudibranch",
+        "billing_account": "octopus",
     }
     path = EventarcClient.common_billing_account_path(**expected)

@@ -15714,7 +31753,7 @@ def test_parse_common_billing_account_path():
     assert expected == actual

 def test_common_folder_path():
-    folder = "cuttlefish"
+    folder = "oyster"
     expected = "folders/{folder}".format(folder=folder, )
     actual = EventarcClient.common_folder_path(folder)
     assert expected == actual
@@ -15722,7 +31761,7 @@ def test_common_folder_path():

 def test_parse_common_folder_path():
     expected = {
-        "folder": "mussel",
+        "folder": "nudibranch",
     }
     path = EventarcClient.common_folder_path(**expected)

@@ -15731,7 +31770,7 @@ def test_parse_common_folder_path():
     assert expected == actual

 def test_common_organization_path():
-    organization = "winkle"
+    organization = "cuttlefish"
     expected = "organizations/{organization}".format(organization=organization, )
     actual = EventarcClient.common_organization_path(organization)
     assert expected == actual
@@ -15739,7 +31778,7 @@ def test_common_organization_path():

 def test_parse_common_organization_path():
     expected = {
-        "organization": "nautilus",
+        "organization": "mussel",
     }
     path = EventarcClient.common_organization_path(**expected)

@@ -15748,7 +31787,7 @@ def test_parse_common_organization_path():
     assert expected == actual

 def test_common_project_path():
-    project = "scallop"
+    project = "winkle"
     expected = "projects/{project}".format(project=project, )
     actual = EventarcClient.common_project_path(project)
     assert expected == actual
@@ -15756,7 +31795,7 @@ def test_common_project_path():

 def test_parse_common_project_path():
     expected = {
-        "project": "abalone",
+        "project": "nautilus",
     }
     path = EventarcClient.common_project_path(**expected)

@@ -15765,8 +31804,8 @@ def test_parse_common_project_path():
     assert expected == actual

 def test_common_location_path():
-    project = "squid"
-    location = "clam"
+    project = "scallop"
+    location = "abalone"
     expected = "projects/{project}/locations/{location}".format(project=project, location=location, )
     actual = EventarcClient.common_location_path(project, location)
     assert expected == actual
@@ -15774,8 +31813,8 @@ def test_common_location_path():

 def test_parse_common_location_path():
     expected = {
-        "project": "whelk",
-        "location": "octopus",
+        "project": "squid",
+        "location": "clam",
     }
     path = EventarcClient.common_location_path(**expected)
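The path-helper tests added in this patch all exercise the same round-trip contract: a *_path() classmethod on EventarcClient formats a fully qualified resource name from its components, and the matching parse_*_path() classmethod recovers those components as a dict. A minimal sketch of that contract outside the generated suite, using only the enrollment_path / parse_enrollment_path helpers introduced in this diff; the project, location, and enrollment IDs below are placeholder values, not taken from any real resource:

from google.cloud.eventarc_v1 import EventarcClient

# Build a fully qualified enrollment resource name from its components.
name = EventarcClient.enrollment_path("example-project", "us-central1", "example-enrollment")
assert name == "projects/example-project/locations/us-central1/enrollments/example-enrollment"

# parse_enrollment_path() reverses the construction into a dict of components,
# which is exactly what the generated test_parse_enrollment_path test asserts.
parsed = EventarcClient.parse_enrollment_path(name)
assert parsed == {
    "project": "example-project",
    "location": "us-central1",
    "enrollment": "example-enrollment",
}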