3131 EventKey ,
3232 TaskJobLogsRetrieveContext ,
3333)
34+ from cylc .flow .task_state import (
35+ TASK_STATUS_PREPARING ,
36+ TASK_STATUS_SUBMIT_FAILED ,
37+ )
3438
3539from .test_workflow_events import TEMPLATES
3640
@@ -126,7 +130,7 @@ async def test__insert_task_job(flow, one_conf, scheduler, start, validate):
126130
127131
128132async def test__always_insert_task_job (
129- flow , scheduler , mock_glbl_cfg , start , run
133+ flow , scheduler , mock_glbl_cfg , start
130134):
131135 """Insert Task Job _Always_ inserts a task into the data store.
132136
@@ -144,20 +148,22 @@ async def test__always_insert_task_job(
144148 [platforms]
145149 [[broken1]]
146150 hosts = no-such-host-1
151+ job runner = abc
147152 [[broken2]]
148153 hosts = no-such-host-2
154+ job runner = def
149155 [platform groups]
150- [[broken ]]
156+ [[broken_group ]]
151157 platforms = broken1
152158 """
153159 mock_glbl_cfg ('cylc.flow.platforms.glbl_cfg' , global_config )
154160
155161 id_ = flow ({
156- 'scheduling' : {'graph' : {'R1' : 'broken & broken2 ' }},
162+ 'scheduling' : {'graph' : {'R1' : 'foo & bar ' }},
157163 'runtime' : {
158164 'root' : {'submission retry delays' : 'PT10M' },
159- 'broken ' : {'platform' : 'broken ' },
160- 'broken2 ' : {'platform' : 'broken2' }
165+ 'foo ' : {'platform' : 'broken_group ' },
166+ 'bar ' : {'platform' : 'broken2' }
161167 }
162168 })
163169
@@ -174,14 +180,57 @@ async def test__always_insert_task_job(
174180 )
175181
176182 # Both jobs are in the data store with submit-failed state:
183+ ds_jobs = schd .data_store_mgr .data [schd .id ][JOBS ]
177184 updates = {
178- k .split ('//' )[- 1 ]: v .state
179- for k , v in schd . data_store_mgr . data [ schd . id ][ JOBS ] .items ()
185+ id_ .split ('//' )[- 1 ]: ( job .state , job . platform , job . job_runner_name )
186+ for id_ , job in ds_jobs .items ()
180187 }
181188 assert updates == {
182- '1/broken /01' : 'submit-failed' ,
183- '1/broken2 /01' : 'submit-failed'
189+ '1/foo /01' : ( 'submit-failed' , 'broken_group' , '' ) ,
190+ '1/bar /01' : ( 'submit-failed' , 'broken2' , 'def' ),
184191 }
192+ for job in ds_jobs .values ():
193+ assert job .submitted_time
194+
195+
async def test__submit_failed_job_id(flow, scheduler, start, db_select):
    """If a job is killed in the submitted state, the job ID should still be
    in the DB/data store.

    See https://github.com/cylc/cylc-flow/pull/6926
    """
    async def get_ds_job_id(schd: Scheduler):
        # Flush pending deltas, then read the job_id of the (single) job
        # currently held in the scheduler's data store.
        await schd.update_data_structure()
        return list(schd.data_store_mgr.data[schd.id][JOBS].values())[0].job_id

    id_ = flow('foo')
    schd: Scheduler = scheduler(id_)
    job_id = '1234'
    async with start(schd):
        # Fake a submission without actually running a job: put the task in
        # the preparing state with a known job ID, then feed the "submitted"
        # message through the task events manager.
        itask = schd.pool.get_tasks()[0]
        itask.state_reset(TASK_STATUS_PREPARING)
        itask.submit_num = 1
        itask.summary['submit_method_id'] = job_id
        schd.workflow_db_mgr.put_insert_task_jobs(itask, {})
        schd.task_events_mgr.process_message(
            itask, 'INFO', schd.task_events_mgr.EVENT_SUBMITTED
        )
        assert await get_ds_job_id(schd) == job_id

        # Now fail the submission (e.g. job killed while submitted); the job
        # ID must survive in the data store.
        schd.task_events_mgr.process_message(
            itask, 'CRITICAL', schd.task_events_mgr.EVENT_SUBMIT_FAILED
        )
        assert itask.state(TASK_STATUS_SUBMIT_FAILED)
        assert await get_ds_job_id(schd) == job_id

    # The job ID must also have been recorded in the DB, with submit_status=1
    # (i.e. submission failed).
    assert db_select(schd, False, 'task_jobs', 'job_id', 'submit_status') == [
        (job_id, 1)
    ]

    # Restart and check data store again:
    schd = scheduler(id_)
    async with start(schd):
        assert await get_ds_job_id(schd) == job_id
185234
186235
187236async def test__process_message_failed_with_retry (one , start , log_filter ):
0 commit comments