
Commit 95b2fdf

supply allow_large_results=False when max_results is set
Parent: 766640a

File tree

2 files changed: +12, -2 lines


bigframes/session/loader.py

Lines changed: 11 additions & 1 deletion
@@ -721,6 +721,9 @@ def read_gbq_table(
         columns=columns,
         use_cache=use_cache,
         dry_run=dry_run,
+        # If max_results has been set, we almost certainly have < 10 GB
+        # of results.
+        allow_large_results=False,
     )
     return df
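From the caller's side, this first hunk means a capped read no longer needs the large-results path. Below is a minimal sketch of the effect, assuming a configured Google Cloud project; the public bigframes.pandas.read_gbq entry point with its max_results parameter is the assumed route into read_gbq_table here, and the table name is just an example.

import bigframes.pandas as bpd

# With max_results capped, results stay well under the ~10 GB threshold
# mentioned in the commit, so the loader can pass
# allow_large_results=False and skip writing a destination table
# before downloading rows.
df = bpd.read_gbq(
    "bigquery-public-data.usa_names.usa_1910_2013",
    max_results=100,
)
print(df.shape)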
@@ -1040,7 +1043,14 @@ def read_gbq_query(
         # local node. Likely there are a wide range of sizes in which it
         # makes sense to download the results beyond the first page, even if
         # there is a job and destination table available.
-        if rows is not None and destination is None:
+        if (
+            rows is not None
+            and destination is None
+            and (
+                query_job_for_metrics is None
+                or query_job_for_metrics.statement_type == "SELECT"
+            )
+        ):
             return bf_read_gbq_query.create_dataframe_from_row_iterator(
                 rows,
                 session=self._session,
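The new guard consults QueryJob.statement_type from google-cloud-bigquery, which reports the kind of statement a job ran (e.g. "SELECT", "INSERT", "CREATE_TABLE_AS_SELECT"). A standalone sketch of that check, assuming a configured client; the query string is a placeholder.

from google.cloud import bigquery

client = bigquery.Client()
job = client.query("SELECT 1 AS x")
rows = job.result()

# Only plain SELECT jobs (or jobs with no metadata available) take the
# fast path that builds a DataFrame straight from the row iterator;
# other statement types fall through to the destination-table path.
if job.statement_type == "SELECT":
    print("row iterator is safe to materialize locally")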

tests/system/small/test_session.py

Lines changed: 1 addition & 1 deletion
@@ -619,7 +619,7 @@ def test_read_gbq_wildcard(
     pytest.param(
         {"query": {"useQueryCache": False, "maximumBytesBilled": "100"}},
         marks=pytest.mark.xfail(
-            raises=google.api_core.exceptions.InternalServerError,
+            raises=google.api_core.exceptions.BadRequest,
             reason="Expected failure when the query exceeds the maximum bytes billed limit.",
         ),
     ),
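The test fix tracks the error BigQuery returns when a query would exceed maximumBytesBilled: per this diff, the service rejects the job with a 400-level BadRequest rather than an InternalServerError. A standalone sketch of that failure mode, assuming a configured client; the table is just an example large enough to trip a 100-byte limit.

import google.api_core.exceptions
from google.cloud import bigquery

client = bigquery.Client()
job_config = bigquery.QueryJobConfig(maximum_bytes_billed=100)

try:
    client.query(
        "SELECT name FROM `bigquery-public-data.usa_names.usa_1910_2013`",
        job_config=job_config,
    ).result()
except google.api_core.exceptions.BadRequest as exc:
    # The job fails because the bytes-billed estimate exceeds the limit.
    print(f"rejected as expected: {exc}")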
