@@ -304,13 +304,12 @@ def _getDelegationID(self, arcJobID):

    #############################################################################

-    def _getArcJobID(self, executableFile, inputs, outputs, executables, delegation):
+    def _getArcJobID(self, executableFile, inputs, outputs, delegation):
        """Get an ARC JobID endpoint to upload executables and inputs.

        :param str executableFile: executable to submit
        :param list inputs: list of input files
        :param list outputs: list of expected output files
-        :param list executables: list of secondary executables (will be uploaded with the executable mode)
        :param str delegation: delegation ID

        :return: tuple containing a job ID and a stamp
@@ -320,7 +319,7 @@ def _getArcJobID(self, executableFile, inputs, outputs, executables, delegation)
        query = self._urlJoin("jobs")

        # Get the job into the ARC way
-        xrslString, diracStamp = self._writeXRSL(executableFile, inputs, outputs, executables)
+        xrslString, diracStamp = self._writeXRSL(executableFile, inputs, outputs)
        xrslString += delegation
        self.log.debug("XRSL string submitted", "is %s" % xrslString)
        self.log.debug("DIRAC stamp for job", "is %s" % diracStamp)
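Note (a sketch, not part of the patch): how the delegation fragment ends up in the XRSL job description, using a made-up delegation ID and an illustrative XRSL line; the "(delegationid=...)" format matches the submitJob hunk further down.

    # Sketch only: hypothetical delegation ID appended to an illustrative XRSL string.
    delegationID = "12345abcde"  # assumed value returned by the delegation endpoint
    delegation = "\n(delegationid=%s)" % delegationID
    xrslString = "&(executable=dirac-pilot.sh)"  # illustrative XRSL fragment
    xrslString += delegation
    print(xrslString)
    # &(executable=dirac-pilot.sh)
    # (delegationid=12345abcde)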
@@ -344,21 +343,16 @@ def _getArcJobID(self, executableFile, inputs, outputs, executables, delegation)
        arcJobID = responseJob["id"]
        return S_OK((arcJobID, diracStamp))

-    def _uploadJobDependencies(self, arcJobID, executableFile, inputs, executables):
+    def _uploadJobDependencies(self, arcJobID, executableFile, inputs):
        """Upload job dependencies so that the job can start.
        This includes the executables and the inputs.

        :param str arcJobID: ARC job ID
        :param str executableFile: executable file
        :param list inputs: inputs required by the executable file
-        :param list executables: executables require by the executable file
        """
        filesToSubmit = [executableFile]
-        filesToSubmit += executables
-        if inputs:
-            if not isinstance(inputs, list):
-                inputs = [inputs]
-            filesToSubmit += inputs
+        filesToSubmit += inputs

        for fileToSubmit in filesToSubmit:
            queryExecutable = self._urlJoin(os.path.join("jobs", arcJobID, "session", os.path.basename(fileToSubmit)))
@@ -376,32 +370,6 @@ def _uploadJobDependencies(self, arcJobID, executableFile, inputs, executables):
            self.log.verbose("Input correctly uploaded", fileToSubmit)
        return S_OK()

-    def _killJob(self, arcJobList):
-        """Kill the specified jobs
-
-        :param list arcJobList: list of ARC Job IDs
-        """
-        result = self._checkSession()
-        if not result["OK"]:
-            self.log.error("Cannot kill jobs", result["Message"])
-            return result
-
-        # List of jobs in json format for the REST query
-        jobsJson = {"job": [{"id": job} for job in arcJobList]}
-
-        # Prepare the command
-        params = {"action": "kill"}
-        query = self._urlJoin("jobs")
-
-        # Killing jobs should be fast
-        result = self._request("post", query, params=params, data=json.dumps(jobsJson))
-        if not result["OK"]:
-            self.log.error("Failed to kill all these jobs.", result["Message"])
-            return S_ERROR("Failed to kill all these jobs")
-
-        self.log.debug("Successfully deleted jobs")
-        return S_OK()
-
    def submitJob(self, executableFile, proxy, numberOfJobs=1, inputs=None, outputs=None):
        """Method to submit job
        Assume that the ARC queues are always of the format nordugrid-<batchSystem>-<queue>
@@ -423,10 +391,14 @@ def submitJob(self, executableFile, proxy, numberOfJobs=1, inputs=None, outputs=
        else:
            delegation = "\n(delegationid=%s)" % result["Value"]

+        if not inputs:
+            inputs = []
+        if not outputs:
+            outputs = []
+
        # If there is a preamble, then we bundle it in an executable file
-        executables = []
        if self.preamble:
-            executables = [executableFile]
+            inputs.append(executableFile)
            executableFile = self._bundlePreamble(executableFile)

        # Submit multiple jobs sequentially.
@@ -436,14 +408,14 @@ def submitJob(self, executableFile, proxy, numberOfJobs=1, inputs=None, outputs=
        batchIDList = []
        stampDict = {}
        for _ in range(numberOfJobs):
-            result = self._getArcJobID(executableFile, inputs, outputs, executables, delegation)
+            result = self._getArcJobID(executableFile, inputs, outputs, delegation)
            if not result["OK"]:
                break
            arcJobID, diracStamp = result["Value"]

            # At this point, only the XRSL job has been submitted to AREX services
            # Here we also upload the executable, other executable files and inputs.
-            result = self._uploadJobDependencies(arcJobID, executableFile, inputs, executables)
+            result = self._uploadJobDependencies(arcJobID, executableFile, inputs)
            if not result["OK"]:
                break

@@ -469,12 +441,83 @@ def killJob(self, jobIDList):

        :param list jobIDList: list of DIRAC Job IDs
        """
+        if not isinstance(jobIDList, list):
+            jobIDList = [jobIDList]
        self.log.debug("Killing jobs", ",".join(jobIDList))

-        # List of jobs in json format for the REST query
-        jList = [self._DiracToArcID(job) for job in jobIDList]
+        # Convert DIRAC jobs to ARC jobs
+        # DIRAC Jobs might be stored with a DIRAC stamp (":::XXXXX") that should be removed
+        jList = [self._DiracToArcID(job.split(":::")[0]) for job in jobIDList]
        return self._killJob(jList)

+    def _killJob(self, arcJobList):
+        """Kill the specified jobs
+
+        :param list arcJobList: list of ARC Job IDs
+        """
+        result = self._checkSession()
+        if not result["OK"]:
+            self.log.error("Cannot kill jobs", result["Message"])
+            return result
+
+        # List of jobs in json format for the REST query
+        jobsJson = {"job": [{"id": job} for job in arcJobList]}
+
+        # Prepare the command
+        params = {"action": "kill"}
+        query = self._urlJoin("jobs")
+
+        # Killing jobs should be fast
+        result = self._request("post", query, params=params, data=json.dumps(jobsJson))
+        if not result["OK"]:
+            self.log.error("Failed to kill all these jobs.", result["Message"])
+            return S_ERROR("Failed to kill all these jobs")
+
+        self.log.debug("Successfully deleted jobs")
+        return S_OK()
+
+    #############################################################################
+
+    def cleanJob(self, jobIDList):
+        """Clean files related to the specified jobs
+
+        :param list jobIDList: list of DIRAC Job IDs
+        """
+        if not isinstance(jobIDList, list):
+            jobIDList = [jobIDList]
+        self.log.debug("Cleaning jobs", ",".join(jobIDList))
+
+        # Convert DIRAC jobs to ARC jobs
+        # DIRAC Jobs might be stored with a DIRAC stamp (":::XXXXX") that should be removed
+        jList = [self._DiracToArcID(job.split(":::")[0]) for job in jobIDList]
+        return self._cleanJob(jList)
+
+    def _cleanJob(self, arcJobList):
+        """Clean files related to the specified jobs
+
+        :param list arcJobList: list of ARC Job IDs
+        """
+        result = self._checkSession()
+        if not result["OK"]:
+            self.log.error("Cannot clean jobs", result["Message"])
+            return result
+
+        # List of jobs in json format for the REST query
+        jobsJson = {"job": [{"id": job} for job in arcJobList]}
+
+        # Prepare the command
+        params = {"action": "clean"}
+        query = self._urlJoin("jobs")
+
+        # Cleaning jobs
+        result = self._request("post", query, params=params, data=json.dumps(jobsJson))
+        if not result["OK"]:
+            self.log.error("Failed to clean all these jobs.", result["Message"])
+            return S_ERROR("Failed to clean all these jobs")
+
+        self.log.debug("Successfully cleaned jobs")
+        return S_OK()
+
    #############################################################################

    def getCEStatus(self):
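Note (a sketch, not part of the patch): the request body that _killJob and _cleanJob above post to the "jobs" endpoint, built with made-up ARC job IDs; only the "action" parameter differs between the two methods.

    import json

    arcJobList = ["abc123", "def456"]  # hypothetical ARC job IDs
    jobsJson = {"job": [{"id": job} for job in arcJobList]}
    params = {"action": "kill"}  # "clean" in _cleanJob
    print(json.dumps(jobsJson))
    # {"job": [{"id": "abc123"}, {"id": "def456"}]}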
@@ -613,14 +656,10 @@ def getJobStatus(self, jobIDList):
        if not isinstance(jobIDList, list):
            jobIDList = [jobIDList]

-        # Jobs are stored with a DIRAC stamp (":::XXXXX") appended
-        jobList = []
-        for j in jobIDList:
-            job = j.split(":::")[0]
-            jobList.append(job)
-
-        self.log.debug("Getting status of jobs : %s" % jobList)
-        arcJobsJson = {"job": [{"id": self._DiracToArcID(job)} for job in jobList]}
+        self.log.debug("Getting status of jobs:", jobIDList)
+        # Convert DIRAC jobs to ARC jobs and encapsulate them in a dictionary for the REST query
+        # DIRAC Jobs might be stored with a DIRAC stamp (":::XXXXX") that should be removed
+        arcJobsJson = {"job": [{"id": self._DiracToArcID(job.split(":::")[0])} for job in jobIDList]}

        # Prepare the command
        params = {"action": "status"}
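Note (a sketch, not part of the patch): the stamp-stripping idiom now shared by killJob, cleanJob and getJobStatus, shown with made-up job references; splitting on ":::" is a no-op when no stamp is attached.

    jobIDList = ["arcjob0001:::a1b2c3", "arcjob0002"]  # hypothetical DIRAC job references
    stripped = [job.split(":::")[0] for job in jobIDList]
    print(stripped)
    # ['arcjob0001', 'arcjob0002']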
@@ -688,12 +727,8 @@ def getJobLog(self, jobID):
            self.log.error("Cannot get job logging info", result["Message"])
            return result

-        # Extract stamp from the Job ID
-        if ":::" in jobID:
-            jobID = jobID.split(":::")[0]
-
        # Prepare the command: Get output files
-        arcJob = self._DiracToArcID(jobID)
+        arcJob = self._DiracToArcID(jobID.split(":::")[0])
        query = self._urlJoin(os.path.join("jobs", arcJob, "diagnose", "errors"))

        # Submit the GET request to retrieve outputs
@@ -759,9 +794,9 @@ def getJobOutput(self, jobID, workingDirectory=None):
        remoteOutputs = result["Value"]
        self.log.debug("Outputs to get are", remoteOutputs)

-        # We assume that workingDirectory exists
        if not workingDirectory:
            if "WorkingDirectory" in self.ceParameters:
+                # We assume that workingDirectory exists
                workingDirectory = os.path.join(self.ceParameters["WorkingDirectory"], job)
            else:
                workingDirectory = job