@@ -247,12 +247,14 @@ def test_create_dataset(self):
     def test_get_dataset(self):
         dataset_id = _make_dataset_id("get_dataset")
         client = Config.CLIENT
-        dataset_arg = Dataset(client.dataset(dataset_id))
+        project = client.project
+        dataset_ref = bigquery.DatasetReference(project, dataset_id)
+        dataset_arg = Dataset(dataset_ref)
         dataset_arg.friendly_name = "Friendly"
         dataset_arg.description = "Description"
         dataset = retry_403(client.create_dataset)(dataset_arg)
         self.to_delete.append(dataset)
-        dataset_ref = client.dataset(dataset_id)
+        dataset_ref = bigquery.DatasetReference(project, dataset_id)

         # Get with a reference.
         got = client.get_dataset(dataset_ref)
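Every call site touched by this change swaps the deprecated Client.dataset() shortcut for an explicitly built bigquery.DatasetReference derived from the client's project. A minimal standalone sketch of that pattern, assuming google-cloud-bigquery is installed and default credentials are available (the dataset ID below is hypothetical):

    from google.cloud import bigquery

    client = bigquery.Client()
    # Build the reference explicitly instead of client.dataset("example_dataset").
    dataset_ref = bigquery.DatasetReference(client.project, "example_dataset")  # hypothetical ID
    dataset = bigquery.Dataset(dataset_ref)
    dataset.friendly_name = "Friendly"
    dataset = client.create_dataset(dataset)   # API request
    fetched = client.get_dataset(dataset_ref)  # API request
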
@@ -416,17 +418,18 @@ def test_create_table_w_time_partitioning_w_clustering_fields(self):

     def test_delete_dataset_with_string(self):
         dataset_id = _make_dataset_id("delete_table_true")
-        dataset_ref = Config.CLIENT.dataset(dataset_id)
+        project = Config.CLIENT.project
+        dataset_ref = bigquery.DatasetReference(project, dataset_id)
         retry_403(Config.CLIENT.create_dataset)(Dataset(dataset_ref))
         self.assertTrue(_dataset_exists(dataset_ref))
         Config.CLIENT.delete_dataset(dataset_id)
         self.assertFalse(_dataset_exists(dataset_ref))

     def test_delete_dataset_delete_contents_true(self):
         dataset_id = _make_dataset_id("delete_table_true")
-        dataset = retry_403(Config.CLIENT.create_dataset)(
-            Dataset(Config.CLIENT.dataset(dataset_id))
-        )
+        project = Config.CLIENT.project
+        dataset_ref = bigquery.DatasetReference(project, dataset_id)
+        dataset = retry_403(Config.CLIENT.create_dataset)(Dataset(dataset_ref))

         table_id = "test_table"
         table_arg = Table(dataset.table(table_id), schema=SCHEMA)
@@ -1363,7 +1366,9 @@ def test_extract_table(self):
         source_blob_name = "person_ages.csv"
         dataset_id = _make_dataset_id("load_gcs_then_extract")
         table_id = "test_table"
-        table_ref = Config.CLIENT.dataset(dataset_id).table(table_id)
+        project = Config.CLIENT.project
+        dataset_ref = bigquery.DatasetReference(project, dataset_id)
+        table_ref = dataset_ref.table(table_id)
         table = Table(table_ref)
         self.to_delete.insert(0, table)
         bucket = self._create_bucket(bucket_name)
@@ -1546,8 +1551,10 @@ def test_query_w_wrong_config(self):
         rows = list(Config.CLIENT.query("SELECT 1;").result())
         assert rows[0][0] == 1

+        project = Config.CLIENT.project
+        dataset_ref = bigquery.DatasetReference(project, "dset")
         bad_config = LoadJobConfig()
-        bad_config.destination = Config.CLIENT.dataset("dset").table("tbl")
+        bad_config.destination = dataset_ref.table("tbl")
         with self.assertRaises(Exception):
             Config.CLIENT.query(good_query, job_config=bad_config).result()

@@ -2678,7 +2685,9 @@ def test_list_rows_max_results_w_bqstorage(self):
         self.assertEqual(len(dataframe.index), 100)

     def temp_dataset(self, dataset_id, location=None):
-        dataset = Dataset(Config.CLIENT.dataset(dataset_id))
+        project = Config.CLIENT.project
+        dataset_ref = bigquery.DatasetReference(project, dataset_id)
+        dataset = Dataset(dataset_ref)
         if location:
             dataset.location = location
         dataset = retry_403(Config.CLIENT.create_dataset)(dataset)
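The temp_dataset helper and the table-level call sites above follow the same approach: TableReference objects come from the explicit DatasetReference, and the dataset can carry a location before creation. A rough end-to-end sketch under the same assumptions (the IDs, location, and schema below are hypothetical, and the test suite's retry_403 wrapper is omitted):

    from google.cloud import bigquery

    client = bigquery.Client()
    dataset_ref = bigquery.DatasetReference(client.project, "temp_example")  # hypothetical ID
    dataset = bigquery.Dataset(dataset_ref)
    dataset.location = "US"  # hypothetical location
    dataset = client.create_dataset(dataset)  # API request
    table = bigquery.Table(
        dataset_ref.table("tbl"),  # hypothetical table ID
        schema=[bigquery.SchemaField("full_name", "STRING")],  # hypothetical schema
    )
    table = client.create_table(table)  # API request
    client.delete_dataset(dataset_ref, delete_contents=True)  # API request; drops the table too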