 def _make_client(*args, use_emulator=True, **kwargs):
     import os
     from google.cloud.bigtable.data._async.client import BigtableDataClientAsync
+
     env_mask = {}
     # by default, use emulator mode to avoid auth issues in CI
     # emulator mode must be disabled by tests that check channel pooling/refresh background tasks
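(The helper's body continues beyond this hunk. For orientation, here is a minimal sketch of how such a helper might toggle emulator mode by masking environment variables; the _make_client_sketch name, the localhost:8086 address, and the mask/restore logic are illustrative assumptions, not the code from this commit. BIGTABLE_EMULATOR_HOST is the variable the Bigtable clients consult for emulator mode.)

def _make_client_sketch(*args, use_emulator=True, **kwargs):
    # Hypothetical illustration only -- not the helper defined in this commit.
    import os
    from unittest import mock
    from google.cloud.bigtable.data._async.client import BigtableDataClientAsync

    masked_env = dict(os.environ)
    if use_emulator:
        # Point the client at a local emulator so tests need no real credentials;
        # the host:port value is an arbitrary placeholder.
        masked_env["BIGTABLE_EMULATOR_HOST"] = "localhost:8086"
    else:
        # Tests that exercise channel pooling/refresh need real-channel behavior,
        # so make sure no emulator variable leaks in from the CI environment.
        masked_env.pop("BIGTABLE_EMULATOR_HOST", None)
    # The emulator variable is read at construction time, so patching os.environ
    # only around the constructor is enough; the original environment is restored on exit.
    with mock.patch.dict(os.environ, masked_env, clear=True):
        return BigtableDataClientAsync(*args, **kwargs)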
@@ -271,7 +272,9 @@ async def test__start_background_channel_refresh_tasks_exist(self):
     @pytest.mark.parametrize("pool_size", [1, 3, 7])
     async def test__start_background_channel_refresh(self, pool_size):
         # should create background tasks for each channel
-        client = self._make_one(project="project-id", pool_size=pool_size, use_emulator=False)
+        client = self._make_one(
+            project="project-id", pool_size=pool_size, use_emulator=False
+        )
         ping_and_warm = AsyncMock()
         client._ping_and_warm_instances = ping_and_warm
         client._start_background_channel_refresh()
@@ -291,7 +294,9 @@ async def test__start_background_channel_refresh(self, pool_size):
     async def test__start_background_channel_refresh_tasks_names(self):
         # if tasks exist, should do nothing
         pool_size = 3
-        client = self._make_one(project="project-id", pool_size=pool_size, use_emulator=False)
+        client = self._make_one(
+            project="project-id", pool_size=pool_size, use_emulator=False
+        )
         for i in range(pool_size):
             name = client._channel_refresh_tasks[i].get_name()
             assert str(i) in name
@@ -938,9 +943,13 @@ async def test_multiple_pool_sizes(self):
         # should be able to create multiple clients with different pool sizes without issue
         pool_sizes = [1, 2, 4, 8, 16, 32, 64, 128, 256]
         for pool_size in pool_sizes:
-            client = self._make_one(project="project-id", pool_size=pool_size, use_emulator=False)
+            client = self._make_one(
+                project="project-id", pool_size=pool_size, use_emulator=False
+            )
             assert len(client._channel_refresh_tasks) == pool_size
-            client_duplicate = self._make_one(project="project-id", pool_size=pool_size, use_emulator=False)
+            client_duplicate = self._make_one(
+                project="project-id", pool_size=pool_size, use_emulator=False
+            )
             assert len(client_duplicate._channel_refresh_tasks) == pool_size
             assert str(pool_size) in str(client.transport)
             await client.close()
@@ -953,7 +962,9 @@ async def test_close(self):
         )

         pool_size = 7
-        client = self._make_one(project="project-id", pool_size=pool_size, use_emulator=False)
+        client = self._make_one(
+            project="project-id", pool_size=pool_size, use_emulator=False
+        )
         assert len(client._channel_refresh_tasks) == pool_size
         tasks_list = list(client._channel_refresh_tasks)
         for task in client._channel_refresh_tasks:
@@ -1003,10 +1014,9 @@ async def test_context_manager(self):

     def test_client_ctor_sync(self):
         # initializing client in a sync context should raise RuntimeError
-        from google.cloud.bigtable.data._async.client import BigtableDataClientAsync

         with pytest.warns(RuntimeWarning) as warnings:
-            client = BigtableDataClientAsync(project="project-id")
+            client = _make_client(project="project-id")
         expected_warning = [w for w in warnings if "client.py" in w.filename]
         assert len(expected_warning) == 1
         assert (
@@ -1020,7 +1030,6 @@ def test_client_ctor_sync(self):
 class TestTableAsync:
     @pytest.mark.asyncio
     async def test_table_ctor(self):
-        from google.cloud.bigtable.data._async.client import BigtableDataClientAsync
         from google.cloud.bigtable.data._async.client import TableAsync
         from google.cloud.bigtable.data._async.client import _WarmedInstanceKey

@@ -1033,7 +1042,7 @@ async def test_table_ctor(self):
         expected_read_rows_attempt_timeout = 0.5
         expected_mutate_rows_operation_timeout = 2.5
         expected_mutate_rows_attempt_timeout = 0.75
-        client = BigtableDataClientAsync()
+        client = _make_client()
         assert not client._active_instances

         table = TableAsync(
@@ -1088,12 +1097,11 @@ async def test_table_ctor_defaults(self):
10881097 """
10891098 should provide default timeout values and app_profile_id
10901099 """
1091- from google .cloud .bigtable .data ._async .client import BigtableDataClientAsync
10921100 from google .cloud .bigtable .data ._async .client import TableAsync
10931101
10941102 expected_table_id = "table-id"
10951103 expected_instance_id = "instance-id"
1096- client = BigtableDataClientAsync ()
1104+ client = _make_client ()
10971105 assert not client ._active_instances
10981106
10991107 table = TableAsync (
@@ -1119,10 +1127,9 @@ async def test_table_ctor_invalid_timeout_values(self):
11191127 """
11201128 bad timeout values should raise ValueError
11211129 """
1122- from google .cloud .bigtable .data ._async .client import BigtableDataClientAsync
11231130 from google .cloud .bigtable .data ._async .client import TableAsync
11241131
1125- client = BigtableDataClientAsync ()
1132+ client = _make_client ()
11261133
11271134 timeout_pairs = [
11281135 ("default_operation_timeout" , "default_attempt_timeout" ),
@@ -1240,10 +1247,8 @@ async def test_customizable_retryable_errors(
        Test that retryable functions support user-configurable arguments, and that the configured retryables are passed
        down to the gapic layer.
        """
-        from google.cloud.bigtable.data import BigtableDataClientAsync
-
        with mock.patch(retry_fn_path) as retry_fn_mock:
-            async with BigtableDataClientAsync() as client:
+            async with _make_client() as client:
                table = client.get_table("instance-id", "table-id")
                expected_predicate = lambda a: a in expected_retryables  # noqa
                retry_fn_mock.side_effect = RuntimeError("stop early")
@@ -1291,14 +1296,13 @@ async def test_customizable_retryable_errors(
     async def test_call_metadata(self, include_app_profile, fn_name, fn_args, gapic_fn):
         """check that all requests attach proper metadata headers"""
         from google.cloud.bigtable.data import TableAsync
-        from google.cloud.bigtable.data import BigtableDataClientAsync

         profile = "profile" if include_app_profile else None
         with mock.patch(
             f"google.cloud.bigtable_v2.BigtableAsyncClient.{gapic_fn}", mock.AsyncMock()
         ) as gapic_mock:
             gapic_mock.side_effect = RuntimeError("stop early")
-            async with BigtableDataClientAsync() as client:
+            async with _make_client() as client:
                 table = TableAsync(client, "instance-id", "table-id", profile)
                 try:
                     test_fn = table.__getattribute__(fn_name)
@@ -1825,7 +1829,6 @@ async def test_row_exists(self, return_value, expected_result):


 class TestReadRowsSharded:
-
     @pytest.mark.asyncio
     async def test_read_rows_sharded_empty_query(self):
         async with _make_client() as client:
@@ -1984,7 +1987,6 @@ async def test_read_rows_sharded_batching(self):


 class TestSampleRowKeys:
-
     async def _make_gapic_stream(self, sample_list: list[tuple[bytes, int]]):
         from google.cloud.bigtable_v2.types import SampleRowKeysResponse

@@ -2133,7 +2135,6 @@ async def test_sample_row_keys_non_retryable_errors(self, non_retryable_exceptio


 class TestMutateRow:
-
     @pytest.mark.asyncio
     @pytest.mark.parametrize(
         "mutation_arg",
@@ -2306,7 +2307,6 @@ async def test_mutate_row_no_mutations(self, mutations):


 class TestBulkMutateRows:
-
     async def _mock_response(self, response_list):
         from google.cloud.bigtable_v2.types import MutateRowsResponse
         from google.rpc import status_pb2
@@ -2683,7 +2683,6 @@ async def test_bulk_mutate_error_recovery(self):


 class TestCheckAndMutateRow:
-
     @pytest.mark.parametrize("gapic_result", [True, False])
     @pytest.mark.asyncio
     async def test_check_and_mutate(self, gapic_result):
@@ -2832,7 +2831,6 @@ async def test_check_and_mutate_mutations_parsing(self):


 class TestReadModifyWriteRow:
-
     @pytest.mark.parametrize(
         "call_rules,expected_rules",
         [