Skip to content

Commit f732d74

Browse files
issue #505 option to disable error logs in batch jobs
1 parent 07ecedc commit f732d74

File tree

3 files changed

+77
-11
lines changed

3 files changed

+77
-11
lines changed

openeo/rest/datacube.py

Lines changed: 4 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -2421,6 +2421,7 @@ def execute_batch(
24212421
job_options: Optional[dict] = None,
24222422
validate: Optional[bool] = None,
24232423
auto_add_save_result: bool = True,
2424+
enable_print=True,
24242425
# TODO: deprecate `format_options` as keyword arguments
24252426
**format_options,
24262427
) -> BatchJob:
@@ -2438,6 +2439,7 @@ def execute_batch(
24382439
:param validate: Optional toggle to enable/prevent validation of the process graphs before execution
24392440
(overruling the connection's ``auto_validate`` setting).
24402441
:param auto_add_save_result: Automatically add a ``save_result`` node to the process graph if there is none yet.
2442+
:param enable_print: whether to print error logs
24412443
24422444
.. versionchanged:: 0.32.0
24432445
Added ``auto_add_save_result`` option
@@ -2473,7 +2475,8 @@ def execute_batch(
24732475
)
24742476
return job.run_synchronous(
24752477
outputfile=outputfile,
2476-
print=print, max_poll_interval=max_poll_interval, connection_retry_interval=connection_retry_interval
2478+
print=print, max_poll_interval=max_poll_interval, connection_retry_interval=connection_retry_interval,
2479+
enable_print=enable_print
24772480
)
24782481

24792482
def create_job(

openeo/rest/job.py

Lines changed: 22 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -236,19 +236,30 @@ def logs(
236236

237237
def run_synchronous(
238238
self, outputfile: Union[str, Path, None] = None,
239-
print=print, max_poll_interval=60, connection_retry_interval=30
239+
print=print, max_poll_interval=60, connection_retry_interval=30, enable_print=True
240240
) -> BatchJob:
241-
"""Start the job, wait for it to finish and download result"""
241+
"""
242+
Start the job, wait for it to finish and download result
243+
244+
:param outputfile: The path of a file to which a result can be written
245+
:param print: print/logging function to show progress/status
246+
:param max_poll_interval: maximum number of seconds to sleep between status polls
247+
:param connection_retry_interval: how long to wait when status poll failed due to connection issue
248+
:param enable_print: whether to print error logs
249+
:return:
250+
"""
242251
self.start_and_wait(
243-
print=print, max_poll_interval=max_poll_interval, connection_retry_interval=connection_retry_interval
252+
print=print, max_poll_interval=max_poll_interval, connection_retry_interval=connection_retry_interval,
253+
enable_print=enable_print
244254
)
245255
# TODO #135 support multi file result sets too?
246256
if outputfile is not None:
247257
self.download_result(outputfile)
248258
return self
249259

250260
def start_and_wait(
251-
self, print=print, max_poll_interval: int = 60, connection_retry_interval: int = 30, soft_error_max=10
261+
self, print=print, max_poll_interval: int = 60, connection_retry_interval: int = 30, soft_error_max=10,
262+
enable_print=True
252263
) -> BatchJob:
253264
"""
254265
Start the batch job, poll its status and wait till it finishes (or fails)
@@ -257,6 +268,7 @@ def start_and_wait(
257268
:param max_poll_interval: maximum number of seconds to sleep between status polls
258269
:param connection_retry_interval: how long to wait when status poll failed due to connection issue
259270
:param soft_error_max: maximum number of soft errors (e.g. temporary connection glitches) to allow
271+
:param enable_print: whether to print error logs
260272
:return:
261273
"""
262274
# TODO rename `connection_retry_interval` to something more generic?
@@ -314,13 +326,13 @@ def soft_error(message: str):
314326
poll_interval = min(1.25 * poll_interval, max_poll_interval)
315327

316328
if status != "finished":
317-
# TODO: allow to disable this printing logs (e.g. in non-interactive contexts)?
318329
# TODO: render logs jupyter-aware in a notebook context?
319-
print(f"Your batch job {self.job_id!r} failed. Error logs:")
320-
print(self.logs(level=logging.ERROR))
321-
print(
322-
f"Full logs can be inspected in an openEO (web) editor or with `connection.job({self.job_id!r}).logs()`."
323-
)
330+
if enable_print:
331+
print(f"Your batch job {self.job_id!r} failed. Error logs:")
332+
print(self.logs(level=logging.ERROR))
333+
print(
334+
f"Full logs can be inspected in an openEO (web) editor or with `connection.job({self.job_id!r}).logs()`."
335+
)
324336
raise JobFailedException(
325337
f"Batch job {self.job_id!r} didn't finish successfully. Status: {status} (after {elapsed()}).",
326338
job=self,

tests/rest/test_job.py

Lines changed: 51 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -223,6 +223,57 @@ def test_execute_batch_with_soft_errors(con100, requests_mock, tmpdir, error_res
223223
assert path.read() == "tiffdata"
224224
assert job.logs() == []
225225

226+
def test_execute_batch_with_error_with_error_logs_disabled(con100, requests_mock, tmpdir):
227+
requests_mock.get(API_URL + "/file_formats", json={"output": {"GTiff": {"gis_data_types": ["raster"]}}})
228+
requests_mock.get(API_URL + "/collections/SENTINEL2", json={"foo": "bar"})
229+
requests_mock.post(API_URL + "/jobs", status_code=201, headers={"OpenEO-Identifier": "f00ba5"})
230+
requests_mock.post(API_URL + "/jobs/f00ba5/results", status_code=202)
231+
requests_mock.get(
232+
API_URL + "/jobs/f00ba5",
233+
[
234+
{"json": {"status": "submitted"}},
235+
{"json": {"status": "queued"}},
236+
{"json": {"status": "running", "progress": 15}},
237+
{"json": {"status": "running", "progress": 80}},
238+
{"json": {"status": "error", "progress": 100}},
239+
],
240+
)
241+
requests_mock.get(
242+
API_URL + "/jobs/f00ba5/logs",
243+
json={
244+
"logs": [
245+
{"id": "12", "level": "info", "message": "starting"},
246+
{"id": "34", "level": "error", "message": "nope"},
247+
]
248+
},
249+
)
250+
251+
path = tmpdir.join("tmp.tiff")
252+
log = []
253+
254+
try:
255+
with fake_time():
256+
con100.load_collection("SENTINEL2").execute_batch(
257+
outputfile=path, out_format="GTIFF",
258+
max_poll_interval=.1, print=log.append, enable_print=False
259+
)
260+
pytest.fail("execute_batch should fail")
261+
except JobFailedException as e:
262+
assert e.job.status() == "error"
263+
assert [(l.level, l.message) for l in e.job.logs()] == [
264+
("info", "starting"),
265+
("error", "nope"),
266+
]
267+
268+
assert log == [
269+
"0:00:01 Job 'f00ba5': send 'start'",
270+
"0:00:02 Job 'f00ba5': submitted (progress N/A)",
271+
"0:00:04 Job 'f00ba5': queued (progress N/A)",
272+
"0:00:07 Job 'f00ba5': running (progress 15%)",
273+
"0:00:12 Job 'f00ba5': running (progress 80%)",
274+
"0:00:20 Job 'f00ba5': error (progress 100%)",
275+
]
276+
226277

227278
@pytest.mark.parametrize(["error_response", "expected"], [
228279
(

0 commit comments

Comments (0)