Skip to content

Commit bb5c06c

Browse files
authored
Merge branch 'main' into pangea-v1alpha
2 parents 07bc30a + 40529de commit bb5c06c

File tree

3 files changed

+99
-15
lines changed

3 files changed

+99
-15
lines changed

google/cloud/bigquery/client.py

Lines changed: 21 additions & 15 deletions
Original file line numberDiff line numberDiff line change
@@ -44,6 +44,8 @@
4444
import uuid
4545
import warnings
4646

47+
import requests
48+
4749
from google import resumable_media # type: ignore
4850
from google.resumable_media.requests import MultipartUpload # type: ignore
4951
from google.resumable_media.requests import ResumableUpload
@@ -65,6 +67,7 @@
6567
DEFAULT_BQSTORAGE_CLIENT_INFO = None # type: ignore
6668

6769

70+
from google.auth.credentials import Credentials
6871
from google.cloud.bigquery._http import Connection
6972
from google.cloud.bigquery import _job_helpers
7073
from google.cloud.bigquery import _pandas_helpers
@@ -126,15 +129,14 @@
126129
_versions_helpers.PANDAS_VERSIONS.try_import()
127130
) # mypy check fails because pandas import is outside module, there are type: ignore comments related to this
128131

132+
129133
ResumableTimeoutType = Union[
130134
None, float, Tuple[float, float]
131135
] # for resumable media methods
132136

133137
if typing.TYPE_CHECKING: # pragma: NO COVER
134138
# os.PathLike is only subscriptable in Python 3.9+, thus shielding with a condition.
135139
PathType = Union[str, bytes, os.PathLike[str], os.PathLike[bytes]]
136-
import requests # required by api-core
137-
138140
_DEFAULT_CHUNKSIZE = 100 * 1024 * 1024 # 100 MB
139141
_MAX_MULTIPART_SIZE = 5 * 1024 * 1024
140142
_DEFAULT_NUM_RETRIES = 6
@@ -231,30 +233,34 @@ class Client(ClientWithProject):
231233

232234
def __init__(
233235
self,
234-
project=None,
235-
credentials=None,
236-
_http=None,
237-
location=None,
238-
default_query_job_config=None,
239-
default_load_job_config=None,
240-
client_info=None,
241-
client_options=None,
236+
project: Optional[str] = None,
237+
credentials: Optional[Credentials] = None,
238+
_http: Optional[requests.Session] = None,
239+
location: Optional[str] = None,
240+
default_query_job_config: Optional[QueryJobConfig] = None,
241+
default_load_job_config: Optional[LoadJobConfig] = None,
242+
client_info: Optional[google.api_core.client_info.ClientInfo] = None,
243+
client_options: Optional[
244+
Union[google.api_core.client_options.ClientOptions, Dict[str, Any]]
245+
] = None,
242246
) -> None:
247+
if client_options is None:
248+
client_options = {}
249+
if isinstance(client_options, dict):
250+
client_options = google.api_core.client_options.from_dict(client_options)
251+
# assert isinstance(client_options, google.api_core.client_options.ClientOptions)
252+
243253
super(Client, self).__init__(
244254
project=project,
245255
credentials=credentials,
246256
client_options=client_options,
247257
_http=_http,
248258
)
249259

250-
kw_args = {"client_info": client_info}
260+
kw_args: Dict[str, Any] = {"client_info": client_info}
251261
bq_host = _get_bigquery_host()
252262
kw_args["api_endpoint"] = bq_host if bq_host != _DEFAULT_HOST else None
253263
client_universe = None
254-
if client_options is None:
255-
client_options = {}
256-
if isinstance(client_options, dict):
257-
client_options = google.api_core.client_options.from_dict(client_options)
258264
if client_options.api_endpoint:
259265
api_endpoint = client_options.api_endpoint
260266
kw_args["api_endpoint"] = api_endpoint

google/cloud/bigquery/table.py

Lines changed: 35 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -410,6 +410,7 @@ class Table(_TableBase):
410410
"require_partition_filter": "requirePartitionFilter",
411411
"table_constraints": "tableConstraints",
412412
"external_catalog_table_options": "externalCatalogTableOptions",
413+
"max_staleness": "maxStaleness",
413414
}
414415

415416
def __init__(self, table_ref, schema=None) -> None:
@@ -1140,6 +1141,40 @@ def __repr__(self):
11401141
def __str__(self):
11411142
return f"{self.project}.{self.dataset_id}.{self.table_id}"
11421143

1144+
@property
def max_staleness(self):
    """Optional[str]: Maximum staleness of data that could be returned when the table is queried.

    Encoded as a string form of the SQL ``INTERVAL`` type; this property is
    optional and defaults to ``None``. According to the BigQuery API
    documentation, ``maxStaleness`` specifies the maximum time interval for
    which stale data can be returned when querying the table — useful for
    controlling freshness in scenarios such as metadata-cached external
    tables.

    Returns:
        Optional[str]: The staleness interval (e.g. ``'1h'``, ``'30m'``,
        ``'15s'`` for hours, minutes, seconds respectively), or ``None``.
    """
    api_field = self._PROPERTY_TO_API_FIELD["max_staleness"]
    return self._properties.get(api_field)

@max_staleness.setter
def max_staleness(self, value):
    """Set the maximum staleness for the table.

    Args:
        value (Optional[str]): A valid time-interval string such as
            ``'1h'`` (1 hour), ``'30m'`` (30 minutes), or ``'15s'``
            (15 seconds); ``None`` clears the setting.

    Raises:
        ValueError: If ``value`` is neither ``None`` nor a string.
    """
    # Reject anything that is not None or a str before touching _properties.
    if not (value is None or isinstance(value, str)):
        raise ValueError("max_staleness must be a string or None")
    self._properties[self._PROPERTY_TO_API_FIELD["max_staleness"]] = value
1177+
11431178

11441179
class TableListItem(_TableBase):
11451180
"""A read-only table resource from a list operation.

tests/unit/test_table.py

Lines changed: 43 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1476,6 +1476,49 @@ def test___str__(self):
14761476
table1 = self._make_one(TableReference(dataset, "table1"))
14771477
self.assertEqual(str(table1), "project1.dataset1.table1")
14781478

1479+
def test_max_staleness_getter(self):
    """max_staleness is None on a fresh table and reflects values set via the setter."""
    table = self._make_one(
        DatasetReference("test-project", "test_dataset").table("test_table")
    )
    # Unset by default.
    self.assertIsNone(table.max_staleness)
    # Round-trip a value through the setter.
    table.max_staleness = "1h"
    self.assertEqual(table.max_staleness, "1h")
1489+
1490+
def test_max_staleness_setter(self):
    """Setter accepts a valid interval string, and accepts None to clear the value."""
    ref = DatasetReference("test-project", "test_dataset").table("test_table")
    table = self._make_one(ref)
    # Valid interval string is stored as-is.
    table.max_staleness = "30m"
    self.assertEqual(table.max_staleness, "30m")
    # None clears the setting.
    table.max_staleness = None
    self.assertIsNone(table.max_staleness)
1501+
1502+
def test_max_staleness_setter_invalid_type(self):
    """Assigning a non-string, non-None value to max_staleness raises ValueError."""
    ref = DatasetReference("test-project", "test_dataset").table("test_table")
    table = self._make_one(ref)
    with self.assertRaises(ValueError):
        table.max_staleness = 123  # Not a string
1510+
1511+
def test_max_staleness_to_api_repr(self):
    """max_staleness surfaces as the camelCase 'maxStaleness' key in the API payload."""
    ref = DatasetReference("test-project", "test_dataset").table("test_table")
    table = self._make_one(ref)
    table.max_staleness = "1h"
    resource = table.to_api_repr()
    self.assertEqual(resource.get("maxStaleness"), "1h")
1521+
14791522

14801523
class Test_row_from_mapping(unittest.TestCase, _SchemaBase):
14811524
PROJECT = "prahj-ekt"

0 commit comments

Comments
 (0)