Skip to content

Commit 5fb37e0

Browse files
committed
updates enums, client, and tests
1 parent 059fd9a commit 5fb37e0

File tree

3 files changed

+103
-45
lines changed

3 files changed

+103
-45
lines changed

google/cloud/bigquery/client.py

Lines changed: 22 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -91,6 +91,7 @@
9191
from google.cloud.bigquery.dataset import DatasetListItem
9292
from google.cloud.bigquery.dataset import DatasetReference
9393
from google.cloud.bigquery.enums import AutoRowIDs
94+
from google.cloud.bigquery.enums import UpdateMode
9495
from google.cloud.bigquery.format_options import ParquetOptions
9596
from google.cloud.bigquery.job import (
9697
CopyJob,
@@ -1198,7 +1199,7 @@ def update_dataset(
11981199
fields: Sequence[str],
11991200
retry: retries.Retry = DEFAULT_RETRY,
12001201
timeout: TimeoutType = DEFAULT_TIMEOUT,
1201-
update_mode: Optional[enums.UpdateMode] = None,
1202+
update_mode: Optional[UpdateMode] = None,
12021203
) -> Dataset:
12031204
"""Change some fields of a dataset.
12041205
@@ -1238,6 +1239,20 @@ def update_dataset(
12381239
timeout (Optional[float]):
12391240
The number of seconds to wait for the underlying HTTP transport
12401241
before using ``retry``.
1242+
update_mode (Optional[google.cloud.bigquery.enums.UpdateMode]):
1243+
Specifies the kind of information to update in a dataset.
1244+
By default, dataset metadata (e.g. friendlyName, description,
1245+
labels, etc) and ACL information are updated. This argument can
1246+
take on the following possible enum values.
1247+
1248+
* :attr:`~google.cloud.bigquery.enums.UpdateMode.UPDATE_MODE_UNSPECIFIED`:
1249+
The default value. Behavior defaults to UPDATE_FULL.
1250+
* :attr:`~google.cloud.bigquery.enums.UpdateMode.UPDATE_METADATA`:
1251+
Includes metadata information for the dataset, such as friendlyName, description, labels, etc.
1252+
* :attr:`~google.cloud.bigquery.enums.UpdateMode.UPDATE_ACL`:
1253+
Includes ACL information for the dataset, which defines dataset access for one or more entities.
1254+
* :attr:`~google.cloud.bigquery.enums.UpdateMode.UPDATE_FULL`:
1255+
Includes both dataset metadata and ACL information.
12411256
12421257
Returns:
12431258
google.cloud.bigquery.dataset.Dataset:
@@ -1250,9 +1265,11 @@ def update_dataset(
12501265
headers = None
12511266
path = dataset.path
12521267
span_attributes = {"path": path, "fields": fields}
1253-
query_params: Dict[str, Any] = {}
1254-
if update_mode is not None:
1255-
query_params["updateMode"] = str(update_mode.value)
1268+
1269+
if update_mode:
1270+
query_params = {"updateMode": update_mode.value}
1271+
else:
1272+
query_params = {}
12561273

12571274
api_response = self._call_api(
12581275
retry,
@@ -1263,7 +1280,7 @@ def update_dataset(
12631280
data=partial,
12641281
headers=headers,
12651282
timeout=timeout,
1266-
query_params=query_params if query_params else None,
1283+
query_params=query_params,
12671284
)
12681285
return Dataset.from_api_repr(api_response)
12691286

google/cloud/bigquery/enums.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -409,11 +409,11 @@ class BigLakeTableFormat(object):
409409
"""Apache Iceberg format."""
410410

411411

412-
class UpdateMode(str, enum.Enum):
412+
class UpdateMode(enum.Enum):
413413
"""Specifies the kind of information to update in a dataset."""
414414

415415
UPDATE_MODE_UNSPECIFIED = "UPDATE_MODE_UNSPECIFIED"
416-
"""The default value. Default to the UPDATE_FULL."""
416+
"""The default value. Behavior defaults to UPDATE_FULL."""
417417

418418
UPDATE_METADATA = "UPDATE_METADATA"
419419
"""Includes metadata information for the dataset, such as friendlyName,

tests/unit/test_client.py

Lines changed: 79 additions & 38 deletions
Original file line numberDiff line numberDiff line change
@@ -60,7 +60,8 @@
6060

6161
from google.cloud.bigquery import job as bqjob
6262
import google.cloud.bigquery._job_helpers
63-
from google.cloud.bigquery.dataset import DatasetReference
63+
from google.cloud.bigquery.dataset import DatasetReference, Dataset
64+
from google.cloud.bigquery.enums import UpdateMode
6465
from google.cloud.bigquery import exceptions
6566
from google.cloud.bigquery import ParquetOptions
6667
import google.cloud.bigquery.retry
@@ -2101,6 +2102,7 @@ def test_update_dataset(self):
21012102
},
21022103
path="/" + PATH,
21032104
timeout=7.5,
2105+
query_params={},
21042106
)
21052107
self.assertEqual(ds2.description, ds.description)
21062108
self.assertEqual(ds2.friendly_name, ds.friendly_name)
@@ -2114,56 +2116,94 @@ def test_update_dataset(self):
21142116
client.update_dataset(ds, [])
21152117
req = conn.api_request.call_args
21162118
self.assertEqual(req[1]["headers"]["If-Match"], "etag")
2117-
self.assertIsNone(req[1].get("query_params"))
2119+
self.assertEqual(req[1].get("query_params"), {})
21182120

21192121
def test_update_dataset_w_update_mode(self):
2120-
from google.cloud.bigquery.dataset import Dataset
2121-
from google.cloud.bigquery import enums
2122+
PATH = f"projects/{self.PROJECT}/datasets/{self.DS_ID}"
2123+
creds = _make_credentials()
2124+
client = self._make_one(project=self.PROJECT, credentials=creds)
21222125

2123-
PATH = "projects/%s/datasets/%s" % (self.PROJECT, self.DS_ID)
21242126
DESCRIPTION = "DESCRIPTION"
21252127
RESOURCE = {
21262128
"datasetReference": {"projectId": self.PROJECT, "datasetId": self.DS_ID},
21272129
"etag": "etag",
21282130
"description": DESCRIPTION,
21292131
}
2132+
dataset_ref = DatasetReference(self.PROJECT, self.DS_ID)
2133+
orig_dataset = Dataset(dataset_ref)
2134+
orig_dataset.description = DESCRIPTION
2135+
filter_fields = ["description"]
2136+
2137+
test_cases = [
2138+
(None, None),
2139+
(UpdateMode.UPDATE_MODE_UNSPECIFIED, "UPDATE_MODE_UNSPECIFIED"),
2140+
(UpdateMode.UPDATE_METADATA, "UPDATE_METADATA"),
2141+
(UpdateMode.UPDATE_ACL, "UPDATE_ACL"),
2142+
(UpdateMode.UPDATE_FULL, "UPDATE_FULL"),
2143+
]
2144+
2145+
for update_mode_arg, expected_param_value in test_cases:
2146+
with self.subTest(
2147+
update_mode_arg=update_mode_arg,
2148+
expected_param_value=expected_param_value,
2149+
):
2150+
conn = client._connection = make_connection(RESOURCE, RESOURCE)
2151+
2152+
new_dataset = client.update_dataset(
2153+
orig_dataset,
2154+
fields=filter_fields,
2155+
update_mode=update_mode_arg,
2156+
)
2157+
self.assertEqual(orig_dataset.description, new_dataset.description)
2158+
2159+
if expected_param_value:
2160+
expected_query_params = {"updateMode": expected_param_value}
2161+
else:
2162+
expected_query_params = {}
2163+
2164+
conn.api_request.assert_called_once_with(
2165+
method="PATCH",
2166+
path="/" + PATH,
2167+
data={"description": DESCRIPTION},
2168+
timeout=DEFAULT_TIMEOUT,
2169+
query_params=expected_query_params if expected_query_params else {},
2170+
)
2171+
2172+
def test_update_dataset_w_invalid_update_mode(self):
21302173
creds = _make_credentials()
21312174
client = self._make_one(project=self.PROJECT, credentials=creds)
2132-
ds = Dataset(DatasetReference(self.PROJECT, self.DS_ID))
2133-
ds.description = DESCRIPTION
2134-
filter_fields = ["description"]
21352175

2136-
# Test each UpdateMode enum value
2137-
for update_mode_enum in enums.UpdateMode:
2138-
conn = client._connection = make_connection(RESOURCE)
2139-
ds2 = client.update_dataset(
2140-
ds,
2141-
fields=filter_fields,
2142-
update_mode=update_mode_enum,
2143-
)
2144-
self.assertEqual(ds2.description, ds.description)
2145-
conn.api_request.assert_called_once_with(
2146-
method="PATCH",
2147-
data={"description": DESCRIPTION},
2148-
path="/" + PATH,
2149-
timeout=DEFAULT_TIMEOUT,
2150-
query_params={"updateMode": str(update_mode_enum.value)},
2151-
)
2176+
DESCRIPTION = "DESCRIPTION"
2177+
resource = {
2178+
"datasetReference": {"projectId": self.PROJECT, "datasetId": self.DS_ID},
2179+
"etag": "etag",
2180+
}
21522181

2153-
# Test when update_mode is not provided
2154-
conn = client._connection = make_connection(RESOURCE)
2155-
ds2 = client.update_dataset(
2156-
ds,
2157-
fields=filter_fields,
2158-
)
2159-
self.assertEqual(ds2.description, ds.description)
2160-
conn.api_request.assert_called_once_with(
2161-
method="PATCH",
2162-
data={"description": DESCRIPTION},
2163-
path="/" + PATH,
2164-
timeout=DEFAULT_TIMEOUT,
2165-
query_params=None, # Expect None or empty dict when not provided
2166-
)
2182+
dataset_ref = DatasetReference(self.PROJECT, self.DS_ID)
2183+
orig_dataset = Dataset(dataset_ref)
2184+
orig_dataset.description = DESCRIPTION
2185+
filter_fields = ["description"] # A non-empty list of fields is required
2186+
2187+
# Mock the connection to prevent actual API calls
2188+
# and to provide a minimal valid response if the call were to proceed.
2189+
conn = client._connection = make_connection(resource)
2190+
2191+
test_cases = [
2192+
"INVALID_STRING",
2193+
123,
2194+
123.45,
2195+
object(),
2196+
]
2197+
2198+
for invalid_update_mode in test_cases:
2199+
with self.subTest(invalid_update_mode=invalid_update_mode):
2200+
conn.api_request.reset_mock() # Reset mock for each sub-test
2201+
with self.assertRaises(AttributeError):
2202+
client.update_dataset(
2203+
orig_dataset,
2204+
fields=filter_fields,
2205+
update_mode=invalid_update_mode,
2206+
)
21672207

21682208
def test_update_dataset_w_custom_property(self):
21692209
# The library should handle sending properties to the API that are not
@@ -2195,6 +2235,7 @@ def test_update_dataset_w_custom_property(self):
21952235
data={"newAlphaProperty": "unreleased property"},
21962236
path=path,
21972237
timeout=DEFAULT_TIMEOUT,
2238+
query_params={},
21982239
)
21992240

22002241
self.assertEqual(dataset.dataset_id, self.DS_ID)

0 commit comments

Comments
 (0)