Skip to content

Commit f1d8291

Browse files
committed
Adds unit tests, updates helpers, revises several client attrs
1 parent 48ed583 commit f1d8291

File tree

3 files changed

+224
-152
lines changed

3 files changed

+224
-152
lines changed

google/cloud/bigquery_v2/services/centralized_service/_helpers.py

Lines changed: 28 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -22,3 +22,31 @@ def _drop_self_key(kwargs):
2222
raise TypeError("kwargs must be a dict.")
2323
kwargs.pop("self", None) # Essentially a no-op if 'self' key does not exist
2424
return kwargs
25+
26+
27+
def _make_request(
28+
request_class,
29+
user_request,
30+
identifier_value,
31+
identifier_name: str,
32+
parser,
33+
identifier_required: bool = True,
34+
):
35+
if user_request is not None and identifier_value is not None:
36+
raise ValueError(
37+
f"Provide either a request object or '{identifier_name}', not both."
38+
)
39+
40+
if user_request is not None:
41+
return user_request
42+
43+
if identifier_required and identifier_value is None:
44+
raise ValueError(
45+
f"Either a request object or '{identifier_name}' must be provided."
46+
)
47+
48+
if identifier_value is None:
49+
request_fields = parser()
50+
else:
51+
request_fields = parser(identifier_value)
52+
return request_class(**request_fields)

google/cloud/bigquery_v2/services/centralized_service/client.py

Lines changed: 100 additions & 67 deletions
Original file line numberDiff line numberDiff line change
@@ -16,6 +16,7 @@
1616

1717
import os
1818
from typing import (
19+
Dict,
1920
Optional,
2021
Sequence,
2122
Tuple,
@@ -34,6 +35,7 @@
3435
# Import types modules (to access *Requests classes)
3536
from google.cloud.bigquery_v2.types import (
3637
dataset,
38+
dataset_reference,
3739
job,
3840
model,
3941
)
@@ -43,151 +45,182 @@
4345
from google.api_core import retry as retries
4446
from google.auth import credentials as auth_credentials
4547

46-
# Create type aliases
try:
    OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None]
except AttributeError:  # pragma: NO COVER
    # Older api-core releases lack _MethodDefault; fall back to a looser alias.
    OptionalRetry = Union[retries.Retry, object, None]  # type: ignore

# Anything that can name a dataset: a "project.dataset" / "dataset" string
# or a DatasetReference message.
DatasetIdentifier = Union[str, dataset_reference.DatasetReference]

# TODO: This variable is here to simplify prototyping, etc.
PROJECT_ID = os.environ.get("GOOGLE_CLOUD_PROJECT")

# Shared per-call option defaults used by every service method below.
DEFAULT_RETRY: OptionalRetry = gapic_v1.method.DEFAULT
DEFAULT_TIMEOUT: Union[float, object] = gapic_v1.method.DEFAULT
DEFAULT_METADATA: Sequence[Tuple[str, Union[str, bytes]]] = ()
5861

59-
6062
# Create Centralized Client
class BigQueryClient:
    """A centralized client for BigQuery API."""

    def __init__(
        self,
        *,
        credentials: Optional[auth_credentials.Credentials] = None,
        client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None,
    ):
        """
        Initializes the BigQueryClient.

        Args:
            credentials:
                The credentials to use for authentication. If not provided, the
                client will attempt to use the default credentials.
            client_options:
                A dictionary of client options to pass to the underlying
                service clients.
        """
        # Per-service clients are created lazily and memoized here, keyed by
        # short service name ("dataset", "job", "model").
        self._clients: Dict[str, object] = {}
        self._credentials = credentials
        self._client_options = client_options
        # Default project used when identifiers omit one; sourced from the
        # GOOGLE_CLOUD_PROJECT environment variable at import time.
        self.project = PROJECT_ID
88+
89+
# --- HELPER METHODS ---
90+
def _parse_dataset_path(self, dataset_path: str) -> Tuple[Optional[str], str]:
91+
"""
92+
Helper to parse project and dataset from a string identifier.
93+
94+
Args:
95+
dataset_path: A string in the format 'project_id.dataset_id' or
96+
'dataset_id'.
97+
98+
Returns:
99+
A tuple of (project_id, dataset_id).
100+
"""
101+
if "." in dataset_path:
102+
project_id, dataset_id = dataset_path.split(".", 1)
103+
return project_id, dataset_id
104+
return self.project, dataset_path
105+
106+
def _parse_dataset_id_to_dict(self, dataset_id: DatasetIdentifier) -> dict:
107+
if isinstance(dataset_id, str):
108+
project_id, dataset_id_str = self._parse_dataset_path(dataset_id)
109+
return {"project_id": project_id, "dataset_id": dataset_id_str}
110+
elif isinstance(dataset_id, dataset_reference.DatasetReference):
111+
return {
112+
"project_id": dataset_id.project_id,
113+
"dataset_id": dataset_id.dataset_id,
114+
}
115+
else:
116+
raise TypeError(f"Invalid type for dataset_id: {type(dataset_id)}")
71117

72-
# --- SERVICE CLIENT ATTRIBUTES ---
118+
def _parse_project_id_to_dict(self, project_id: Optional[str] = None) -> dict:
119+
"""Helper to create a request dictionary from a project_id."""
120+
final_project_id = project_id or self.project
121+
return {"project_id": final_project_id}
73122

123+
# --- *SERVICECLIENT ATTRIBUTES ---
74124
@property
75125
def dataset_service_client(self):
76126
if "dataset" not in self._clients:
77-
from google.cloud.bigquery_v2.services import dataset_service
78-
79127
self._clients["dataset"] = dataset_service.DatasetServiceClient(
80128
credentials=self._credentials, client_options=self._client_options
81129
)
82130
return self._clients["dataset"]
83131

84132
@dataset_service_client.setter
85133
def dataset_service_client(self, value):
86-
# Check for the methods the centralized client exposes (to allow duck-typing)
87-
required_methods = [
88-
"get_dataset",
89-
"insert_dataset",
90-
"patch_dataset",
91-
"update_dataset",
92-
"delete_dataset",
93-
"list_datasets",
94-
"undelete_dataset",
95-
]
96-
for method in required_methods:
97-
if not hasattr(value, method) or not callable(getattr(value, method)):
98-
raise AttributeError(
99-
f"Object assigned to dataset_service_client is missing a callable '{method}' method."
100-
)
134+
if not isinstance(value, dataset_service.DatasetServiceClient):
135+
raise TypeError(
136+
"Expected an instance of dataset_service.DatasetServiceClient."
137+
)
101138
self._clients["dataset"] = value
102139

103140
@property
104141
def job_service_client(self):
105142
if "job" not in self._clients:
106-
from google.cloud.bigquery_v2.services import job_service
107-
108143
self._clients["job"] = job_service.JobServiceClient(
109144
credentials=self._credentials, client_options=self._client_options
110145
)
111146
return self._clients["job"]
112147

113148
@job_service_client.setter
114149
def job_service_client(self, value):
115-
required_methods = [
116-
"get_job",
117-
"insert_job",
118-
"cancel_job",
119-
"delete_job",
120-
"list_jobs",
121-
]
122-
for method in required_methods:
123-
if not hasattr(value, method) or not callable(getattr(value, method)):
124-
raise AttributeError(
125-
f"Object assigned to job_service_client is missing a callable '{method}' method."
126-
)
150+
if not isinstance(value, job_service.JobServiceClient):
151+
raise TypeError("Expected an instance of job_service.JobServiceClient.")
127152
self._clients["job"] = value
128153

129154
@property
130155
def model_service_client(self):
131156
if "model" not in self._clients:
132-
from google.cloud.bigquery_v2.services import model_service
133-
134157
self._clients["model"] = model_service.ModelServiceClient(
135158
credentials=self._credentials, client_options=self._client_options
136159
)
137160
return self._clients["model"]
138161

139162
@model_service_client.setter
140163
def model_service_client(self, value):
141-
required_methods = [
142-
"get_model",
143-
"delete_model",
144-
"patch_model",
145-
"list_models",
146-
]
147-
for method in required_methods:
148-
if not hasattr(value, method) or not callable(getattr(value, method)):
149-
raise AttributeError(
150-
f"Object assigned to model_service_client is missing a callable '{method}' method."
151-
)
164+
if not isinstance(value, model_service.ModelServiceClient):
165+
raise TypeError("Expected an instance of model_service.ModelServiceClient.")
152166
self._clients["model"] = value
153167

154-
# --- SERVICE CLIENT METHODS ---
155-
# TODO: refactor the microgenerator template so that everything related to
156-
# a single ServiceClient is kept close togetehr in the same section of this class:
157-
# @property
158-
# @setter
159-
# _method_A()
160-
# _method_B()
161-
# _method_C()
162-
# etc
163-
168+
# --- *SERVICECLIENT METHODS ---
164169
def get_dataset(
165170
self,
166-
request: Optional[Union[dataset.GetDatasetRequest, dict]] = None,
171+
dataset_id: Optional[DatasetIdentifier] = None,
167172
*,
173+
request: Optional["dataset.GetDatasetRequest"] = None,
168174
retry: OptionalRetry = DEFAULT_RETRY,
169175
timeout: Union[float, object] = DEFAULT_TIMEOUT,
170176
metadata: Sequence[Tuple[str, Union[str, bytes]]] = DEFAULT_METADATA,
171-
):
177+
) -> "dataset.Dataset":
172178
"""
173179
TODO: Docstring is purposefully blank. microgenerator will add automatically.
174180
"""
175-
kwargs = _helpers._drop_self_key(locals())
176-
return self.dataset_service_client.get_dataset(**kwargs)
181+
final_request = _helpers._make_request(
182+
request_class=dataset.GetDatasetRequest,
183+
user_request=request,
184+
identifier_value=dataset_id,
185+
identifier_name="dataset_id",
186+
parser=self._parse_dataset_id_to_dict,
187+
identifier_required=True,
188+
)
189+
190+
return self.dataset_service_client.get_dataset(
191+
request=final_request,
192+
retry=retry,
193+
timeout=timeout,
194+
metadata=metadata,
195+
)
177196

178197
def list_datasets(
179198
self,
180-
request: Optional[Union[dataset.ListDatasetsRequest, dict]] = None,
199+
project_id: Optional[str] = None,
181200
*,
201+
request: Optional["dataset.ListDatasetsRequest"] = None,
182202
retry: OptionalRetry = DEFAULT_RETRY,
183203
timeout: Union[float, object] = DEFAULT_TIMEOUT,
184204
metadata: Sequence[Tuple[str, Union[str, bytes]]] = DEFAULT_METADATA,
185205
):
186206
"""
187207
TODO: Docstring is purposefully blank. microgenerator will add automatically.
188208
"""
189-
kwargs = _helpers._drop_self_key(locals())
190-
return self.dataset_service_client.list_datasets(**kwargs)
209+
final_request = _helpers._make_request(
210+
request_class=dataset.ListDatasetsRequest,
211+
user_request=request,
212+
identifier_value=project_id,
213+
identifier_name="project_id",
214+
parser=self._parse_project_id_to_dict,
215+
identifier_required=False,
216+
)
217+
218+
return self.dataset_service_client.list_datasets(
219+
request=final_request,
220+
retry=retry,
221+
timeout=timeout,
222+
metadata=metadata,
223+
)
191224

192225
def list_jobs(
193226
self,

0 commit comments

Comments
 (0)