 # Protect against environments where bigquery library is not available.
 try:
   import regex
+  import threading
+  from cachetools import TTLCache, cachedmethod
+  from cachetools.keys import hashkey
   from apitools.base.py.exceptions import HttpError
   from apitools.base.py.exceptions import HttpForbiddenError
   from apitools.base.py.transfer import Upload
@@ -359,6 +362,9 @@ class BigQueryWrapper(object):
 
   HISTOGRAM_METRIC_LOGGER = MetricLogger()
 
+  _TABLE_CACHE_MAX_SIZE = 1024
+  _TABLE_CACHE_TTL_SECONDS = 300
+
   def __init__(self, client=None, temp_dataset_id=None, temp_table_ref=None):
     self.client = client or BigQueryWrapper._bigquery_client(PipelineOptions())
     self.gcp_bq_client = client or gcp_bigquery.Client(
@@ -393,6 +399,14 @@ def __init__(self, client=None, temp_dataset_id=None, temp_table_ref=None):
     self._temporary_table_suffix = uuid.uuid4().hex
     self.temp_dataset_id = temp_dataset_id or self._get_temp_dataset()
 
+    self._table_cache_ttl_secs = self._TABLE_CACHE_TTL_SECONDS
+    self._table_cache_maxsize = self._TABLE_CACHE_MAX_SIZE
+    self._table_cache = TTLCache(
+        maxsize=self._table_cache_maxsize,
+        ttl=self._table_cache_ttl_secs,
+    )
+    self._table_cache_lock = threading.RLock()
+
     self.created_temp_dataset = False
 
   @property
@@ -789,10 +803,22 @@ def _insert_all_rows(
     return not errors, errors
 
   @retry.with_exponential_backoff(
-      num_retries=MAX_RETRIES,
-      retry_filter=retry.retry_on_server_errors_timeout_or_quota_issues_filter)
+      num_retries=MAX_RETRIES,
+      retry_filter=retry.retry_on_server_errors_timeout_or_quota_issues_filter)
+  def _get_table_uncached(self, project_id, dataset_id, table_id):
+    """Uncached tables.get call with retry/backoff."""
+    request = bigquery.BigqueryTablesGetRequest(
+        projectId=project_id, datasetId=dataset_id, tableId=table_id)
+    return self.client.tables.Get(request)
+
+  @cachedmethod(
+      cache=lambda self: self._table_cache,
+      lock=lambda self: self._table_cache_lock,
+      key=lambda self, project_id, dataset_id, table_id:
+          hashkey(project_id, dataset_id, table_id),
+  )
   def get_table(self, project_id, dataset_id, table_id):
-    """Lookup a table's metadata object.
+    """Lookup a table's metadata object (cached with TTL, thread-safe).
 
     Args:
       client: bigquery.BigqueryV2 instance
@@ -805,10 +831,7 @@ def get_table(self, project_id, dataset_id, table_id):
     Raises:
       HttpError: if lookup failed.
     """
-    request = bigquery.BigqueryTablesGetRequest(
-        projectId=project_id, datasetId=dataset_id, tableId=table_id)
-    response = self.client.tables.Get(request)
-    return response
+    return self._get_table_uncached(project_id, dataset_id, table_id)
 
   def _create_table(
       self,
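
For reference, below is a minimal standalone sketch of the caching pattern this change introduces: a per-instance cachetools TTLCache guarded by an RLock and wired up through cachedmethod, so repeated lookups with the same (project, dataset, table) arguments within the TTL window skip the underlying RPC. The TableClient class, the calls counter, and the returned dict are hypothetical stand-ins, not Beam code, and the key lambda assumes a cachetools 5.x release in which cachedmethod passes self to the key function, as the diff above does.

# Minimal sketch only; TableClient and its calls counter are hypothetical.
# Assumes cachetools 5.x, where cachedmethod calls the key function with self.
import threading

from cachetools import TTLCache, cachedmethod
from cachetools.keys import hashkey


class TableClient:
  def __init__(self):
    # Per-instance cache: at most 1024 entries, each expiring 300s after insert.
    self._table_cache = TTLCache(maxsize=1024, ttl=300)
    self._table_cache_lock = threading.RLock()
    self.calls = 0  # counts how often the "expensive" lookup actually runs

  @cachedmethod(
      cache=lambda self: self._table_cache,
      lock=lambda self: self._table_cache_lock,
      key=lambda self, project_id, dataset_id, table_id:
          hashkey(project_id, dataset_id, table_id),
  )
  def get_table(self, project_id, dataset_id, table_id):
    # Stand-in for the uncached tables.Get call.
    self.calls += 1
    return {'project': project_id, 'dataset': dataset_id, 'table': table_id}


client = TableClient()
client.get_table('my-project', 'my_dataset', 'my_table')
client.get_table('my-project', 'my_dataset', 'my_table')  # served from cache
assert client.calls == 1

Because both the cache and the lock are resolved per instance, each wrapper object keeps its own table cache, and entries that have aged past the TTL simply fall through to the uncached call again.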