Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -8,6 +8,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
## [unreleased]
### Added
- `plotman export` command to output summaries from plot logs in `.csv` format. ([#557](https://github.com/ericaltendorf/plotman/pull/557))
- `--json` option for `plotman status`. ([#549](https://github.com/ericaltendorf/plotman/pull/549))

## [0.4.1] - 2021-06-11
### Fixed
Expand Down
24 changes: 24 additions & 0 deletions src/plotman/job.py
Original file line number Diff line number Diff line change
Expand Up @@ -132,6 +132,11 @@ def list_from_tuples(
) -> typing.List["Phase"]:
return [cls.from_tuple(t) for t in l]

def __str__(self) -> str:
if not self.known:
return '?:?'
return f'{self.major}:{self.minor}'

# TODO: be more principled and explicit about what we cache vs. what we look up
# dynamically from the logfile
class Job:
Expand Down Expand Up @@ -407,6 +412,25 @@ def status_str_long(self) -> str:
dst = self.dstdir,
logfile = self.logfile
)

def to_dict(self) -> typing.Dict[str, object]:
'''Exports important information as dictionary.'''
return dict(
plot_id=self.plot_id[:8],
k=self.k,
tmp_dir=self.tmpdir,
dst_dir=self.dstdir,
progress=str(self.progress()),
tmp_usage=self.get_tmp_usage(),
pid=self.proc.pid,
run_status=self.get_run_status(),
mem_usage=self.get_mem_usage(),
time_wall=self.get_time_wall(),
time_user=self.get_time_user(),
time_sys=self.get_time_sys(),
time_iowait=self.get_time_iowait()
)


def get_mem_usage(self) -> int:
# Total, inc swapped
Expand Down
18 changes: 12 additions & 6 deletions src/plotman/plotman.py
Original file line number Diff line number Diff line change
Expand Up @@ -33,7 +33,9 @@ def parse_args(self) -> typing.Any:

sp.add_parser('version', help='print the version')

sp.add_parser('status', help='show current plotting status')
p_status = sp.add_parser('status', help='show current plotting status')
p_status.add_argument("--json", action="store_true",
help="export status report in json format")

sp.add_parser('dirs', help='show directories info')

Expand Down Expand Up @@ -210,11 +212,15 @@ def main() -> None:

# Status report
if args.cmd == 'status':
result = "{0}\n\n{1}\n\nUpdated at: {2}".format(
reporting.status_report(jobs, get_term_width()),
reporting.summary(jobs),
datetime.datetime.today().strftime("%c"),
)
if args.json:
# convert jobs list into json
result = reporting.json_report(jobs)
else:
result = "{0}\n\n{1}\n\nUpdated at: {2}".format(
reporting.status_report(jobs, get_term_width()),
reporting.summary(jobs),
datetime.datetime.today().strftime("%c"),
)
print(result)

# Directories report
Expand Down
32 changes: 21 additions & 11 deletions src/plotman/reporting.py
Original file line number Diff line number Diff line change
@@ -1,11 +1,12 @@
import time
import json
import math
import os
import typing

import psutil
import texttable as tt # from somewhere?
from itertools import groupby

from plotman import archive, configuration, job, manager, plot_util


Expand All @@ -15,23 +16,17 @@ def abbr_path(path: str, putative_prefix: str) -> str:
else:
return path

def phase_str(phase: job.Phase) -> str:
if not phase.known:
return '?:?'

return f'{phase.major}:{phase.minor}'

def phases_str(phases: typing.List[job.Phase], max_num: typing.Optional[int] = None) -> str:
    '''Take a list of phase-subphase pairs and return them as a compact string.

    When more than max_num phases are given, the middle of the list is
    elided and replaced with a "[+N]" count.'''
    if max_num and len(phases) > max_num:
        head_count = math.floor(max_num / 2)
        tail_count = max_num - head_count
        elided_count = len(phases) - (head_count + tail_count)
        head = ' '.join(str(p) for p in phases[:head_count])
        tail = ' '.join(str(p) for p in phases[head_count + elided_count:])
        return head + " [+%d] " % elided_count + tail
    return ' '.join(str(p) for p in phases)

def n_at_ph(jobs: typing.List[job.Job], ph: job.Phase) -> int:
Expand Down Expand Up @@ -106,7 +101,7 @@ def status_report(jobs: typing.List[job.Job], width: int, height: typing.Optiona
abbr_path(j.tmpdir, tmp_prefix), # Temp directory
abbr_path(j.dstdir, dst_prefix), # Destination directory
plot_util.time_format(j.get_time_wall()), # Time wall
phase_str(j.progress()), # Overall progress (major:minor)
str(j.progress()), # Overall progress (major:minor)
plot_util.human_format(j.get_tmp_usage(), 0), # Current temp file size
j.proc.pid, # System pid
j.get_run_status(), # OS status for the job process
Expand Down Expand Up @@ -222,3 +217,18 @@ def dirs_report(jobs: typing.List[job.Job], dir_cfg: configuration.Directories,
])

return '\n'.join(reports) + '\n'

def json_report(jobs: typing.List[job.Job]) -> str:
    '''Serialize the given jobs into a JSON status-report string,
    ordered by wall-clock runtime.'''
    exported = []
    for entry in sorted(jobs, key=job.Job.get_time_wall):
        # oneshot() batches the psutil process queries made by to_dict()
        with entry.proc.oneshot():
            exported.append(entry.to_dict())

    report = {
        "jobs": exported,
        "total_jobs": len(jobs),
        "updated": time.time(),
    }
    return json.dumps(report)