@@ -145,9 +145,16 @@ def main(args):
145145 except SkipJob :
146146 continue
147147
148- # lock machines but do not reimage them
149- if 'roles' in job_config :
150- job_config = lock_machines (job_config )
148+ # Create run archive directory if not already created and
149+ # job's archive directory
150+ job_archive_path = job_config ['archive_path' ]
151+ log .info ('Creating job\' s archive dir %s' , job_archive_path )
152+ safepath .makedirs ('/' , job_archive_path )
153+
154+ orig_job_config_path = os .path .join (job_archive_path , 'orig.config.yaml' )
155+ # Write initial job config in job archive dir
156+ with open (orig_job_config_path , 'w' ) as f :
157+ yaml .safe_dump (job_config , f , default_flow_style = False )
151158
152159 run_args = [
153160 os .path .join (teuth_bin_path , 'teuthology-supervisor' ),
@@ -156,19 +163,21 @@ def main(args):
156163 '--archive-dir' , archive_dir ,
157164 ]
158165
159- # Create run archive directory if not already created and
160- # job's archive directory
161- create_job_archive (job_config ['name' ],
162- job_config ['archive_path' ],
163- archive_dir )
164- job_config_path = os .path .join (job_config ['archive_path' ], 'orig.config.yaml' )
166+ targets_job_config = None
167+ # lock machines but do not reimage them
168+ if 'roles' in job_config :
169+ targets_job_config = lock_machines (job_config )
165170
166- # Write initial job config in job archive dir
167- with open (job_config_path , 'w' ) as f :
168- yaml .safe_dump (job_config , f , default_flow_style = False )
171+ job_config_path = os .path .join (job_archive_path , 'targets.config.yaml' )
172+
173+ with open (job_config_path , 'w' ) as f :
174+ yaml .safe_dump (targets_job_config , f , default_flow_style = False )
169175
170- run_args .extend (["--job-config" , job_config_path ])
176+ run_args .extend (["--job-config" , job_config_path ])
177+ else :
178+ run_args .extend (["--job-config" , orig_job_config_path ])
171179
180+ report .try_push_job_info (job_config , dict (status = 'running' ))
172181 try :
173182 job_proc = subprocess .Popen (
174183 run_args ,
@@ -180,8 +189,8 @@ def main(args):
180189 except Exception :
181190 error_message = "Saw error while trying to spawn supervisor."
182191 log .exception (error_message )
183- if 'targets' in job_config :
184- node_names = job_config ["targets" ].keys ()
192+ if targets_job_config and 'targets' in targets_job_config :
193+ node_names = targets_job_config ["targets" ].keys ()
185194 lock_ops .unlock_safe (
186195 node_names ,
187196 job_config ["owner" ],
@@ -337,7 +346,6 @@ def check_job_expiration(job_config):
337346
338347
339348def lock_machines (job_config ):
340- report .try_push_job_info (job_config , dict (status = 'running' ))
341349 fake_ctx = supervisor .create_fake_context (job_config , block = True )
342350 machine_type = job_config ["machine_type" ]
343351 count = len (job_config ['roles' ])
@@ -352,14 +360,5 @@ def lock_machines(job_config):
352360 tries = - 1 ,
353361 reimage = False ,
354362 )
355- job_config = fake_ctx .config
356- return job_config
357-
363+ return fake_ctx .config
358364
359- def create_job_archive (job_name , job_archive_path , archive_dir ):
360- log .info ('Creating job\' s archive dir %s' , job_archive_path )
361- safe_archive = safepath .munge (job_name )
362- run_archive = os .path .join (archive_dir , safe_archive )
363- if not os .path .exists (run_archive ):
364- safepath .makedirs ('/' , run_archive )
365- safepath .makedirs ('/' , job_archive_path )