Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
5 changes: 3 additions & 2 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -56,8 +56,9 @@ Optional Arguments:
--max-jobs number Maximum number of simultaneously running jobs from the job array.
-o fmt_string, --output fmt_string
Slurm output file pattern. There will be one file per line in your job file. To suppress slurm out files, set this to /dev/null. Defaults to dsq-jobfile-%A_%a-%N.out
--status-dir dir Directory to save the job_jobid_status.tsv file to. Defaults to working directory.
--suppress-stats-file Don't save job stats to job_jobid_status.tsv
--status-dir dir Directory to save the stats file to. Defaults to working directory.
--stats-file file     Filename of the stats file. Any %j in the name is replaced with the job id. Defaults to job_%j_status.tsv
  --suppress-stats-file Don't save job stats to the stats file.
--submit Submit the job array on the fly instead of creating a submission script.
```

Expand Down
29 changes: 21 additions & 8 deletions dSQBatch.py
Original file line number Diff line number Diff line change
Expand Up @@ -54,14 +54,21 @@ def parse_args():
parser.add_argument(
"--suppress-stats-file",
action="store_true",
help="Don't save job stats to job_jobid_status.tsv",
help="Don't save job stats to the stats file.",
)
parser.add_argument(
"--status-dir",
metavar="dir",
"--status-dir",
metavar="dir",
nargs=1,
default=".",
help="Directory to save the stats file to. Defaults to working directory.",
)
parser.add_argument(
"--stats-file",
metavar="file",
nargs=1,
default=".",
help="Directory to save the job_jobid_status.tsv file to. Defaults to working directory.",
default="job_%j_status.tsv",
help="Filename of the stats file. Any %j in the name is replaced with the job id. Defaults to job_%j_status.tsv."
)
return parser.parse_args()

Expand All @@ -72,7 +79,12 @@ def run_job(args):
# slurm calls individual job array indices "tasks"

hostname = platform.node()


if not args.stats_file[0].endswith(".tsv"):
args.stats_file[0] += ".tsv"

args.stats_file[0] = args.stats_file[0].replace("%j", str(jid))

# use task_id to get my job out of job_file
mycmd = ""
with open(args.job_file[0], "r") as tf:
Expand Down Expand Up @@ -116,10 +128,11 @@ def run_job(args):
[tid, ret, hostname, time_start, time_end, time_elapsed, mycmd],
)
)

# append status file with job stats
with open(
path.join(args.status_dir[0], "job_{}_status.tsv".format(jid)), "a"
os.path.join(args.status_dir[0], args.stats_file[0]), "a"
# os.path.join(args.status_dir[0], "job_{}_status.tsv".format(jid)), "a"
) as out_status:
print(
"{Array_Task_ID}\t{Exit_Code}\t{Hostname}\t{T_Start}\t{T_End}\t{T_Elapsed:.02f}\t{Task}".format(
Expand Down