
Commit 6f3decb

issue #505 option to disable error logs in batch jobs
1 parent e106045 commit 6f3decb

3 files changed (+77, -11 lines)

openeo/rest/datacube.py

Lines changed: 4 additions & 1 deletion
@@ -2477,6 +2477,7 @@ def execute_batch(
         job_options: Optional[dict] = None,
         validate: Optional[bool] = None,
         auto_add_save_result: bool = True,
+        log_error=True,
         # TODO: deprecate `format_options` as keyword arguments
         **format_options,
     ) -> BatchJob:
@@ -2494,6 +2495,7 @@ def execute_batch(
         :param validate: Optional toggle to enable/prevent validation of the process graphs before execution
             (overruling the connection's ``auto_validate`` setting).
         :param auto_add_save_result: Automatically add a ``save_result`` node to the process graph if there is none yet.
+        :param log_error: whether to print error logs
 
         .. versionchanged:: 0.32.0
             Added ``auto_add_save_result`` option
@@ -2529,7 +2531,8 @@ def execute_batch(
         )
         return job.run_synchronous(
             outputfile=outputfile,
-            print=print, max_poll_interval=max_poll_interval, connection_retry_interval=connection_retry_interval
+            print=print, max_poll_interval=max_poll_interval, connection_retry_interval=connection_retry_interval,
+            log_error=log_error
         )
 
     def create_job(
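
With this change, the error-log dump on failure becomes opt-out at the `DataCube` level. A minimal usage sketch — the backend URL, collection id, and output file name are placeholders, and the `JobFailedException` import path is assumed:

```python
import logging

import openeo
from openeo.rest import JobFailedException  # import path assumed

connection = openeo.connect("https://openeo.example.com").authenticate_oidc()
cube = connection.load_collection("SENTINEL2_L2A")  # placeholder collection id

try:
    # With log_error=False, a failing job still raises JobFailedException,
    # but no longer dumps its error logs through the print function first.
    cube.execute_batch(outputfile="result.tiff", out_format="GTiff", log_error=False)
except JobFailedException as e:
    # Logs remain available on demand, e.g. for structured logging.
    logging.getLogger(__name__).error("Batch job failed: %s", e.job.logs(level=logging.ERROR))
```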

openeo/rest/job.py

Lines changed: 22 additions & 10 deletions
@@ -236,19 +236,30 @@ def logs(
 
     def run_synchronous(
         self, outputfile: Union[str, Path, None] = None,
-        print=print, max_poll_interval=60, connection_retry_interval=30
+        print=print, max_poll_interval=60, connection_retry_interval=30, log_error=True
     ) -> BatchJob:
-        """Start the job, wait for it to finish and download result"""
+        """
+        Start the job, wait for it to finish and download result
+
+        :param outputfile: The path of a file to which a result can be written
+        :param print: print/logging function to show progress/status
+        :param max_poll_interval: maximum number of seconds to sleep between status polls
+        :param connection_retry_interval: how long to wait when status poll failed due to connection issue
+        :param log_error: whether to print error logs
+        :return:
+        """
         self.start_and_wait(
-            print=print, max_poll_interval=max_poll_interval, connection_retry_interval=connection_retry_interval
+            print=print, max_poll_interval=max_poll_interval, connection_retry_interval=connection_retry_interval,
+            log_error=log_error
         )
         # TODO #135 support multi file result sets too?
         if outputfile is not None:
             self.download_result(outputfile)
         return self
 
     def start_and_wait(
-        self, print=print, max_poll_interval: int = 60, connection_retry_interval: int = 30, soft_error_max=10
+        self, print=print, max_poll_interval: int = 60, connection_retry_interval: int = 30, soft_error_max=10,
+        log_error=True
     ) -> BatchJob:
         """
         Start the batch job, poll its status and wait till it finishes (or fails)
@@ -257,6 +268,7 @@ def start_and_wait(
         :param max_poll_interval: maximum number of seconds to sleep between status polls
         :param connection_retry_interval: how long to wait when status poll failed due to connection issue
         :param soft_error_max: maximum number of soft errors (e.g. temporary connection glitches) to allow
+        :param log_error: whether to print error logs
         :return:
         """
         # TODO rename `connection_retry_interval` to something more generic?
@@ -314,13 +326,13 @@ def soft_error(message: str):
             poll_interval = min(1.25 * poll_interval, max_poll_interval)
 
         if status != "finished":
-            # TODO: allow to disable this printing logs (e.g. in non-interactive contexts)?
             # TODO: render logs jupyter-aware in a notebook context?
-            print(f"Your batch job {self.job_id!r} failed. Error logs:")
-            print(self.logs(level=logging.ERROR))
-            print(
-                f"Full logs can be inspected in an openEO (web) editor or with `connection.job({self.job_id!r}).logs()`."
-            )
+            if log_error:
+                print(f"Your batch job {self.job_id!r} failed. Error logs:")
+                print(self.logs(level=logging.ERROR))
+                print(
+                    f"Full logs can be inspected in an openEO (web) editor or with `connection.job({self.job_id!r}).logs()`."
+                )
             raise JobFailedException(
                 f"Batch job {self.job_id!r} didn't finish successfully. Status: {status} (after {elapsed()}).",
                 job=self,
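
Note that the new `log_error` flag only gates the printing: `JobFailedException` is raised either way. That lets callers in non-interactive contexts (the motivation behind issue #505) route progress output elsewhere and skip the dump entirely. A sketch of that caller-side pattern, assuming `cube` is a `DataCube` as in the earlier example and the logger name is hypothetical:

```python
import logging

_log = logging.getLogger("batch_pipeline")  # hypothetical logger name

job = cube.create_job(out_format="GTiff")
# Progress/status lines go to the logger instead of stdout, and the
# automatic error-log dump on failure is suppressed; a failure still
# surfaces as a JobFailedException for the caller to handle.
job.start_and_wait(print=_log.info, log_error=False)
job.download_result("result.tiff")
```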

tests/rest/test_job.py

Lines changed: 51 additions & 0 deletions
@@ -150,6 +150,57 @@ def test_execute_batch_with_error(con100, requests_mock, tmpdir):
         "Full logs can be inspected in an openEO (web) editor or with `connection.job('f00ba5').logs()`.",
     ]
 
+def test_execute_batch_with_error_with_error_logs_disabled(con100, requests_mock, tmpdir):
+    requests_mock.get(API_URL + "/file_formats", json={"output": {"GTiff": {"gis_data_types": ["raster"]}}})
+    requests_mock.get(API_URL + "/collections/SENTINEL2", json={"foo": "bar"})
+    requests_mock.post(API_URL + "/jobs", status_code=201, headers={"OpenEO-Identifier": "f00ba5"})
+    requests_mock.post(API_URL + "/jobs/f00ba5/results", status_code=202)
+    requests_mock.get(
+        API_URL + "/jobs/f00ba5",
+        [
+            {"json": {"status": "submitted"}},
+            {"json": {"status": "queued"}},
+            {"json": {"status": "running", "progress": 15}},
+            {"json": {"status": "running", "progress": 80}},
+            {"json": {"status": "error", "progress": 100}},
+        ],
+    )
+    requests_mock.get(
+        API_URL + "/jobs/f00ba5/logs",
+        json={
+            "logs": [
+                {"id": "12", "level": "info", "message": "starting"},
+                {"id": "34", "level": "error", "message": "nope"},
+            ]
+        },
+    )
+
+    path = tmpdir.join("tmp.tiff")
+    log = []
+
+    try:
+        with fake_time():
+            con100.load_collection("SENTINEL2").execute_batch(
+                outputfile=path, out_format="GTIFF",
+                max_poll_interval=.1, print=log.append, log_error=False
+            )
+        pytest.fail("execute_batch should fail")
+    except JobFailedException as e:
+        assert e.job.status() == "error"
+        assert [(l.level, l.message) for l in e.job.logs()] == [
+            ("info", "starting"),
+            ("error", "nope"),
+        ]
+
+    assert log == [
+        "0:00:01 Job 'f00ba5': send 'start'",
+        "0:00:02 Job 'f00ba5': submitted (progress N/A)",
+        "0:00:04 Job 'f00ba5': queued (progress N/A)",
+        "0:00:07 Job 'f00ba5': running (progress 15%)",
+        "0:00:12 Job 'f00ba5': running (progress 80%)",
+        "0:00:20 Job 'f00ba5': error (progress 100%)",
+    ]
+
 
 @pytest.mark.parametrize(["error_response", "expected"], [
     (
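
The status mock leans on requests_mock's response lists: registering a list of response dicts makes consecutive GETs on the same URL return them in order, which is how the test walks the job through its lifecycle to the error state, and why the collected `log` ends at "error" with nothing printed about error logs. A standalone illustration of that mechanism (the URL is a placeholder):

```python
import requests
import requests_mock

with requests_mock.Mocker() as m:
    # Each successive GET consumes the next response dict in the list.
    m.get("https://api.example.com/jobs/f00ba5", [
        {"json": {"status": "queued"}},
        {"json": {"status": "running"}},
        {"json": {"status": "error"}},
    ])
    assert requests.get("https://api.example.com/jobs/f00ba5").json()["status"] == "queued"
    assert requests.get("https://api.example.com/jobs/f00ba5").json()["status"] == "running"
```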
