Skip to content

Commit f67aa41

Browse files
authored
chore: remove read_gbq for large tables test. (#1646)
1 parent 102d363 commit f67aa41

File tree

1 file changed

+0
-37
lines changed

1 file changed

+0
-37
lines changed

tests/system/large/test_session.py

Lines changed: 0 additions & 37 deletions
Original file line numberDiff line numberDiff line change
@@ -23,43 +23,6 @@
2323
import bigframes.session._io.bigquery
2424

2525

26-
@pytest.mark.parametrize(
27-
("query_or_table", "index_col"),
28-
[
29-
pytest.param(
30-
"bigquery-public-data.patents_view.ipcr_201708",
31-
(),
32-
id="1g_table_w_default_index",
33-
),
34-
pytest.param(
35-
"bigquery-public-data.new_york_taxi_trips.tlc_yellow_trips_2011",
36-
(),
37-
id="30g_table_w_default_index",
38-
),
39-
# TODO(chelsealin): Disable the long run tests until we have properly
40-
# ordering support to avoid materializing any data.
41-
# # Adding default index to large tables would take much longer time,
42-
# # e.g. ~5 mins for a 100G table, ~20 mins for a 1T table.
43-
# pytest.param(
44-
# "bigquery-public-data.stackoverflow.post_history",
45-
# ["id"],
46-
# id="100g_table_w_unique_column_index",
47-
# ),
48-
# pytest.param(
49-
# "bigquery-public-data.wise_all_sky_data_release.all_wise",
50-
# ["cntr"],
51-
# id="1t_table_w_unique_column_index",
52-
# ),
53-
],
54-
)
55-
def test_read_gbq_for_large_tables(
56-
session: bigframes.Session, query_or_table, index_col
57-
):
58-
"""Verify read_gbq() is able to read large tables."""
59-
df = session.read_gbq(query_or_table, index_col=index_col)
60-
assert len(df.columns) != 0
61-
62-
6326
def test_close(session: bigframes.Session):
6427
# we will create two tables and confirm that they are deleted
6528
# when the session is closed

0 commit comments

Comments
 (0)