Skip to content

Commit 78605c1

Browse files
committed
Fix job runner missing from data store for submit-failed jobs
Remove spurious submit-failed dummy job in task proxy job list
1 parent 92175e2 commit 78605c1

File tree

2 files changed

+36
-20
lines changed

2 files changed

+36
-20
lines changed

cylc/flow/task_job_mgr.py

Lines changed: 23 additions & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -1286,6 +1286,12 @@ def _prep_submit_task_job(
12861286
# bad_hosts:
12871287
self.bad_hosts -= exc.hosts_consumed
12881288
self._set_retry_timers(itask, rtconfig)
1289+
# Provide dummy platform otherwise it will incorrectly show as
1290+
# the default localhost platform in the data store:
1291+
itask.platform = {
1292+
'name': rtconfig['platform'],
1293+
'job runner': '',
1294+
}
12891295
self._prep_submit_task_job_error(itask, msg, exc)
12901296
return False
12911297

@@ -1337,15 +1343,20 @@ def _prep_submit_task_job_error(
13371343
itask.is_manual_submit = False
13381344
# job failed in preparation i.e. is really preparation-failed rather
13391345
# than submit-failed
1340-
# provide a dummy job config - this info will be added to the data
1341-
# store
13421346
try_num = itask.get_try_num()
1343-
itask.jobs.append({
1344-
'task_id': itask.identity,
1345-
'platform': itask.platform,
1346-
'submit_num': itask.submit_num,
1347-
'try_num': try_num,
1348-
})
1347+
if not itask.jobs or (
1348+
itask.jobs[-1]['submit_num'] != itask.submit_num
1349+
):
1350+
# provide a dummy job config - this info will be added to the data
1351+
# store
1352+
itask.jobs.append({
1353+
'task_id': itask.identity,
1354+
'platform': itask.platform,
1355+
'job_runner_name': itask.platform['job runner'],
1356+
'submit_num': itask.submit_num,
1357+
'try_num': try_num,
1358+
'flow_nums': itask.flow_nums,
1359+
})
13491360
# create a DB entry for the submit-failed job
13501361
self.workflow_db_mgr.put_insert_task_jobs(
13511362
itask,
@@ -1415,10 +1426,10 @@ def get_execution_time_limit(
14151426

14161427
def get_job_conf(
14171428
self,
1418-
itask,
1419-
rtconfig,
1420-
job_file_path=None,
1421-
job_d=None,
1429+
itask: 'TaskProxy',
1430+
rtconfig: dict,
1431+
job_file_path: Optional[str] = None,
1432+
job_d: Optional[str] = None,
14221433
):
14231434
"""Return a job config.
14241435

tests/integration/test_task_events_mgr.py

Lines changed: 13 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -144,20 +144,22 @@ async def test__always_insert_task_job(
144144
[platforms]
145145
[[broken1]]
146146
hosts = no-such-host-1
147+
job runner = abc
147148
[[broken2]]
148149
hosts = no-such-host-2
150+
job runner = def
149151
[platform groups]
150-
[[broken]]
152+
[[broken_group]]
151153
platforms = broken1
152154
"""
153155
mock_glbl_cfg('cylc.flow.platforms.glbl_cfg', global_config)
154156

155157
id_ = flow({
156-
'scheduling': {'graph': {'R1': 'broken & broken2'}},
158+
'scheduling': {'graph': {'R1': 'foo & bar'}},
157159
'runtime': {
158160
'root': {'submission retry delays': 'PT10M'},
159-
'broken': {'platform': 'broken'},
160-
'broken2': {'platform': 'broken2'}
161+
'foo': {'platform': 'broken_group'},
162+
'bar': {'platform': 'broken2'}
161163
}
162164
})
163165

@@ -174,14 +176,17 @@ async def test__always_insert_task_job(
174176
)
175177

176178
# Both jobs are in the data store with submit-failed state:
179+
ds_jobs = schd.data_store_mgr.data[schd.id][JOBS]
177180
updates = {
178-
k.split('//')[-1]: v.state
179-
for k, v in schd.data_store_mgr.data[schd.id][JOBS].items()
181+
id_.split('//')[-1]: (job.state, job.platform, job.job_runner_name)
182+
for id_, job in ds_jobs.items()
180183
}
181184
assert updates == {
182-
'1/broken/01': 'submit-failed',
183-
'1/broken2/01': 'submit-failed'
185+
'1/foo/01': ('submit-failed', 'broken_group', ''),
186+
'1/bar/01': ('submit-failed', 'broken2', 'def'),
184187
}
188+
for job in ds_jobs.values():
189+
assert job.submitted_time
185190

186191

187192
async def test__process_message_failed_with_retry(one, start, log_filter):

0 commit comments

Comments (0)