Commit 79c44bc

fix(job): honor custom retry in job.result()
Previously, `_AsyncJob.result()` did not forward the `retry` argument to the superclass's `result()` method when the caller's `retry` object was the default retry object, so the default retry settings were ignored in those cases.

This change makes `result()` always pass the `retry` argument to the superclass, ensuring that the retry settings supplied by the caller are honored. A new parametrized test verifies that `job.result()` handles both the default retry and a custom retry object.
1 parent 7cad6cf commit 79c44bc
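
For context, a minimal caller-side sketch of the behavior this commit affects (not part of the change itself). The bucket, project, dataset, and table names below are placeholders, and the snippet assumes application default credentials; the retry construction mirrors the "custom retry object" case in the new test.

from google.cloud import bigquery
from google.cloud.bigquery.retry import DEFAULT_RETRY

client = bigquery.Client()

# A customized retry policy: the library default with a 10-second timeout,
# matching the "custom retry object" test case added below.
custom_retry = DEFAULT_RETRY.with_timeout(timeout=10.0)

# Placeholder source URI and destination table (not taken from the commit).
load_job = client.load_table_from_uri(
    "gs://example-bucket/data.csv",
    "example-project.example_dataset.new_table",
)

# With this fix, the retry passed here is always forwarded to the polling
# superclass's result(); previously it was dropped when it was the same
# object as DEFAULT_RETRY.
load_job.result(retry=custom_retry)
print(load_job.output_rows)

The new test below exercises both this custom-retry path and the default-retry path against a mocked connection.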

2 files changed: +43 -1 lines changed

google/cloud/bigquery/job/base.py

Lines changed: 1 addition & 1 deletion
@@ -1044,7 +1044,7 @@ def result( # type: ignore # (incompatible with supertype)
         if self.state is None:
             self._begin(retry=retry, timeout=timeout)
 
-        kwargs = {} if retry is DEFAULT_RETRY else {"retry": retry}
+        kwargs = {"retry": retry}
         return super(_AsyncJob, self).result(timeout=timeout, **kwargs)
 
     def cancelled(self):

tests/unit/test_job_retry.py

Lines changed: 42 additions & 0 deletions
@@ -615,3 +615,45 @@ def test_query_and_wait_retries_job_for_DDL_queries(global_time_lock):
     _, kwargs = calls[3]
     assert kwargs["method"] == "POST"
     assert kwargs["path"] == query_request_path
+
+
+@pytest.mark.parametrize(
+    "result_retry",
+    [
+        pytest.param(
+            {},
+            id="default retry",
+        ),
+        pytest.param(
+            {"retry": google.cloud.bigquery.retry.DEFAULT_RETRY.with_timeout(timeout=10.0)},
+            id="custom retry object",
+        ),
+    ],
+)
+def test_retry_load_job_result(result_retry, PROJECT, DS_ID):
+    from google.cloud.bigquery.dataset import DatasetReference
+    from google.cloud.bigquery.job.load import LoadJob
+
+    client = make_client()
+    conn = client._connection = make_connection(
+        dict(
+            status=dict(state="RUNNING"),
+            jobReference={"jobId": "id_1"},
+        ),
+        google.api_core.exceptions.ServiceUnavailable("retry me"),
+        dict(
+            status=dict(state="DONE"),
+            jobReference={"jobId": "id_1"},
+            statistics={"load": {"outputRows": 1}},
+        ),
+    )
+
+    table_ref = DatasetReference(project=PROJECT, dataset_id=DS_ID).table("new_table")
+    job = LoadJob("id_1", source_uris=None, destination=table_ref, client=client)
+    result = job.result(**result_retry)
+
+    assert job.state == "DONE"
+    assert result.output_rows == 1
+
+    # We made all the calls we expected to.
+    assert conn.api_request.call_count == 3
