
Commit 7d3d5af

[cmd/explore] fix pylint warnings

1 parent: 20a8828

1 file changed: +75, -40 lines

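Note on reproducing the check: the hunks below only re-wrap long statements and drop redundant parentheses around argument help strings, which is the typical shape of a line-length cleanup. As a minimal sketch (assuming the warnings in question were pylint's line-too-long messages and that autopep8 and pylint are installed; neither tool is named by the commit itself), such a cleanup could be reproduced and verified locally like this:

    # sketch: re-wrap long lines in place, then re-run the linter on the touched module
    import subprocess

    # autopep8 only adjusts whitespace and line breaks, matching the character of this diff
    subprocess.run(["autopep8", "--in-place", "nixio/cmd/explore.py"], check=True)

    # confirm the remaining pylint output no longer lists the fixed warnings
    subprocess.run(["pylint", "nixio/cmd/explore.py"], check=False)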

nixio/cmd/explore.py

Lines changed: 75 additions & 40 deletions
@@ -76,7 +76,8 @@ def progress(count, total, status='', bar_len=60):
     filled_len = int(percents * bar_len)
     prog_bar = '=' * filled_len + '-' * (bar_len - filled_len)

-    sys.stderr.write('[%s] %.2f%s ...%s\r' % (prog_bar, percents * 100, '%', status))
+    sys.stderr.write('[%s] %.2f%s ...%s\r' %
+                     (prog_bar, percents * 100, '%', status))
     sys.stderr.flush()


@@ -102,10 +103,12 @@ def assemble_files(arguments):
         else:
             candidates = sorted(glob.glob(filename))
             if len(candidates) == 0:
-                print("Error: invalid file or directory! No matches found. '{}'".format(filename))
+                print(
+                    "Error: invalid file or directory! No matches found. '{}'".format(filename))
             for candidate in candidates:
                 if os.path.isdir(candidate):
-                    candidate = os.sep.join((candidate, "*." + arguments.suffix))
+                    candidate = os.sep.join(
+                        (candidate, "*." + arguments.suffix))
                     all_files.extend(sorted(glob.glob(candidate)))
                 elif os.path.isfile(candidate):
                     all_files.append(candidate)
@@ -127,27 +130,33 @@ def dimension_description(data_array, _):

             def array_structure(data_array, verbosity):
                 dims = dimension_description(data_array, verbosity)
-                content = "\n shape: %s, dtype: %s %s" % (data_array.shape, data_array.dtype, dims)
+                content = "\n shape: %s, dtype: %s %s" % (
+                    data_array.shape, data_array.dtype, dims)
                 return content

-            content = " Data Arrays:\n" if len(block.data_arrays) > 0 else ""
+            content = " Data Arrays:\n" if len(
+                block.data_arrays) > 0 else ""
             for da in block.data_arrays:
-                array_struct = array_structure(da, verbosity) if verbosity > 2 else ""
-                content += " %s [%s] --- id: %s %s\n" % (da.name, da.type, da.id, array_struct)
+                array_struct = array_structure(
+                    da, verbosity) if verbosity > 2 else ""
+                content += " %s [%s] --- id: %s %s\n" % (
+                    da.name, da.type, da.id, array_struct)
             return content

         def group_content(block, _):
             content = " Groups:\n" if len(block.groups) > 0 else ""
             for grp in block.groups:
-                content += " %s [%s] -- id: %s\n" % (grp.name, grp.type, grp.id)
+                content += " %s [%s] -- id: %s\n" % (
+                    grp.name, grp.type, grp.id)
             return content

         def tag_content(block, verbosity):
             def tag_details(tag, verbosity):
                 if verbosity < 3:
                     return ""

-                content = " start: %s, extent: %s" % (tag.position, tag.extent)
+                content = " start: %s, extent: %s" % (
+                    tag.position, tag.extent)
                 for ref in tag.references:
                     content += "\n refers to -> %s" % ref.name
                 for feat in tag.features:
@@ -157,15 +166,17 @@ def tag_details(tag, verbosity):

             content = " Tags:\n"
             for tag in block.tags:
-                content += " %s [%s] --- id: %s\n %s" % (tag.name, tag.type, tag.id, tag_details(tag, verbosity))
+                content += " %s [%s] --- id: %s\n %s" % (
+                    tag.name, tag.type, tag.id, tag_details(tag, verbosity))
             return content

         def mtag_content(block, verbosity):
             def tag_details(tag, verbosity):
                 if verbosity < 3:
                     return ""

-                content = " start: %s, extent: %s" % (tag.position[:], tag.extent[:])
+                content = " start: %s, extent: %s" % (
+                    tag.position[:], tag.extent[:])
                 for ref in tag.references:
                     content += "\n segment refers to -> %s" % ref.name
                 for feat in tag.features:
@@ -180,15 +191,18 @@ def tag_details(tag, verbosity):
                 return content

         def frame_content(block, _):
-            content = " Data frames:\n" if len(block.data_frames) > 0 else ""
+            content = " Data frames:\n" if len(
+                block.data_frames) > 0 else ""
             for df in block.data_frames:
-                content += " %s [%s] -- id: %s\n" % (df.name, df.type, df.id)
+                content += " %s [%s] -- id: %s\n" % (
+                    df.name, df.type, df.id)
             return content

         def source_content(block, _):
             content = " Sources:\n" if len(block.groups) > 0 else ""
             for src in block.sources:
-                content += " %s [%s] -- id: %s\n" % (src.name, src.type, src.id)
+                content += " %s [%s] -- id: %s\n" % (
+                    src.name, src.type, src.id)
             return content

         content = ""
@@ -220,7 +234,8 @@ def subsections(section, _):
             content = ""
             sections = section.find_sections()
             prop_count = sum([len(sec) for sec in sections])
-            content += "\n %i sub-section(s), %i properties\n" % (len(sections), prop_count)
+            content += "\n %i sub-section(s), %i properties\n" % (
+                len(sections), prop_count)
             return content

         content = ""
@@ -229,18 +244,21 @@ def subsections(section, _):
         else:
             for section in nix_file.sections:
                 subs_props = subsections(section, verbosity)
-                content += " %s [%s] --- id: %s%s" % (section.name, section.type, section.id, subs_props)
+                content += " %s [%s] --- id: %s%s" % (
+                    section.name, section.type, section.id, subs_props)
         return content

     def file_content(nix_file, verbosity):
         if verbosity is None:
             verbosity = 0
         content = ""
         if verbosity < 1:
-            content += "%s\n%s" % (block_content(nix_file, verbosity), section_content(nix_file, verbosity))
+            content += "%s\n%s" % (block_content(nix_file, verbosity),
+                                   section_content(nix_file, verbosity))
         else:
             content += " data:\n%s" % block_content(nix_file, verbosity)
-            content += " metadata:\n%s\n" % section_content(nix_file, verbosity)
+            content += " metadata:\n%s\n" % section_content(
+                nix_file, verbosity)
         return content

     print("\n%s\n\n" % file_content(nix_file, verbosity))
@@ -321,15 +339,17 @@ def disp_metadata(filename, arguments):
                     part = parts[1] if case_sensitive else parts[1].lower()
                     pname = prop.name if case_sensitive else prop.name.lower()
                     if part == pname if full_match else part in pname:
-                        print("[section: %s, type: %s, id: %s] >> " % (sec.name, sec.type, sec.id), end="")
+                        print("[section: %s, type: %s, id: %s] >> " %
+                              (sec.name, sec.type, sec.id), end="")
                         prop.pprint()
         else:
             sections = find_section(nf, patt, case_sensitive)
             if len(sections) == 0:
                 props = find_props(nf, patt, case_sensitive)
                 for sec in props.keys():
                     for prop in props[sec]:
-                        print("[section: %s, type: %s, id: %s] >> " % (sec.name, sec.type, sec.id), end="")
+                        print("[section: %s, type: %s, id: %s] >> " %
+                              (sec.name, sec.type, sec.id), end="")
                         prop.pprint()
             else:
                 for sec in sections:
@@ -454,15 +474,20 @@ def dump_twod(data, dimensions, label, unit, outfile, fmt="%.6f", end="\n\n",
     numeric_2nd_dim_ticks = isinstance(second_dim_ticks[0], (int, float))
     numeric_data = isinstance(data[0, 0], (int, float))
     data_conv_func = (lambda x: fmt % x) if numeric_data else str
-    dim_ticks_conv_func1 = (lambda x: fmt % x) if numeric_1st_dim_ticks else str
-    dim_ticks_conv_func2 = (lambda x: fmt % x) if numeric_2nd_dim_ticks else str
+    dim_ticks_conv_func1 = (lambda x: fmt %
+                            x) if numeric_1st_dim_ticks else str
+    dim_ticks_conv_func2 = (lambda x: fmt %
+                            x) if numeric_2nd_dim_ticks else str

-    max_tick_len = max([len(dim_ticks_conv_func1(first_dim_ticks[-1])), len(first_dim_label)])
+    max_tick_len = max(
+        [len(dim_ticks_conv_func1(first_dim_ticks[-1])), len(first_dim_label)])
     print("# data label: %s" % label, file=outfile)
     print("# data unit: %s\n" % unit, file=outfile)
-    padding = " " * (max_tick_len - (len(first_dim_label) if first_dim_unit else 0))
+    padding = " " * (max_tick_len - (len(first_dim_label)
+                                     if first_dim_unit else 0))
     print("# %s%s%s" % (first_dim_label, padding, second_dim_label), file=outfile)
-    padding = " " * (max_tick_len - (len(first_dim_unit) if first_dim_unit else 0))
+    padding = " " * (max_tick_len - (len(first_dim_unit)
+                                     if first_dim_unit else 0))
     print("# %s%s%s" % (first_dim_unit, padding, second_dim_unit), file=outfile)
     # first line contains 2nd dim ticks
     print(" " * max_tick_len + " " + (" " * max_tick_len + " ").join(map(dim_ticks_conv_func2, second_dim_ticks)),
@@ -521,7 +546,8 @@ def data_dump(filename, arguments, outfile, show_progress=False):
     entities = find_data_entity(nix_file, arguments)
     for ent in entities:
         if isinstance(ent, nix.DataArray):
-            sys.stderr.write("Dumping %s to %s...\n" % (ent.name, arguments.outfile))
+            sys.stderr.write("Dumping %s to %s...\n" %
+                             (ent.name, arguments.outfile))
             dump_data_array(ent, filename, outfile, show_progress)
             sys.stderr.write("\n")
     nix_file.close()
@@ -537,7 +563,8 @@ def data_plotter(filename, arguments):
             plotter.plot()
             plotter.show()
         else:
-            print("Could not find a suitable plotter for the DataArray: %s" % str(ent))
+            print(
+                "Could not find a suitable plotter for the DataArray: %s" % str(ent))
     else:
         print("Sorry, so far I can only try to plot DataArrays.")

@@ -549,7 +576,8 @@ def disp_data(filename, arguments):
     entities = find_data_entity(nix_file, arguments)
     print("# File: %s" % filename)
     for ent in entities:
-        print("# entity: %s\n# type: %s\n# id: %s" % (ent.name, ent.type, ent.id))
+        print("# entity: %s\n# type: %s\n# id: %s" %
+              (ent.name, ent.type, ent.id))
         print("# created at: %s\n# last edited at: %s\n" % (str(dt.datetime.fromtimestamp(ent.created_at)),
                                                             str(dt.datetime.fromtimestamp(ent.updated_at))))
         print(ent)
@@ -574,7 +602,8 @@ def dump_worker(arguments):
     func = data_dump
     if len(arguments.outfile) > 0:
         if os.path.exists(arguments.outfile):
-            response = input("File %s already exists, are you sure to overwrite it? y/N:" % arguments.outfile)
+            response = input(
+                "File %s already exists, are you sure to overwrite it? y/N:" % arguments.outfile)
             if response.lower() != "y":
                 print("... data dump aborted.")
                 return
@@ -601,25 +630,25 @@ def add_default_file_args(parent_parser):
     parent_parser.add_argument("file", type=str, nargs="+",
                                help="Path to file (at least one)")
     parent_parser.add_argument("-s", "--suffix", type=str, default="nix", nargs="?",
-                               help=("The file suffix used for nix data files (default: %(default)s)."))
+                               help="The file suffix used for nix data files (default: %(default)s).")


 def add_default_args(parent_parser):
     parent_parser.add_argument("-c", "--case_sensitive", action="store_true",
                                help=("matching of entitiy names and types is case sensitive, "
                                      "by default the case is ignored"))
     parent_parser.add_argument("-fm", "--full_match", action="store_true",
-                               help=("names and types must be full matches, by default a partial match is sufficient"))
+                               help="names and types must be full matches, by default a partial match is sufficient")


 def create_metadata_parser(parent_parser):
     meta_parser = parent_parser.add_parser("metadata", help="Filter and display metadata",
                                            aliases=["mdata"],
-                                           description=("Search for metadata items or display metadata (sub)trees."))
+                                           description="Search for metadata items or display metadata (sub)trees.")
     meta_parser.add_argument("-p", "--pattern", type=str, action="append",
                              help=METADATA_PATTERN_HELP)
     meta_parser.add_argument("-d", "--depth", type=int, default=-1,
-                             help=("maximum depth of metadata tree output, default is %(default)s, full depth"))
+                             help="maximum depth of metadata tree output, default is %(default)s, full depth")
     add_default_args(meta_parser)
     add_default_file_args(meta_parser)
     meta_parser.set_defaults(func=mdata_worker)
@@ -629,18 +658,23 @@ def create_metadata_parser(parent_parser):

 def create_data_parser(parent_parser):
     data_parser = parent_parser.add_parser("data",
-                                           help=("Search and display information about data entities"),
+                                           help=(
+                                               "Search and display information about data entities"),
                                            description=DATA_PARSER_HELP)
-    data_parser.add_argument("-p", "--pattern", default="", type=str, help=DATA_PATTERN_HELP)
+    data_parser.add_argument(
+        "-p", "--pattern", default="", type=str, help=DATA_PATTERN_HELP)
     add_default_args(data_parser)
     add_default_file_args(data_parser)
     data_parser.set_defaults(func=data_worker)


 def create_dump_parser(parent_parser):
-    dump_parser = parent_parser.add_parser("dump", help="Dump stored data to stdout", description=DUMP_PARSER_HELP)
-    dump_parser.add_argument("-p", "--pattern", default="", type=str, help=DUMP_PATTERN_HELP)
-    dump_parser.add_argument("-o", "--outfile", default="", type=str, help=DUMP_OUTFILE_HELP)
+    dump_parser = parent_parser.add_parser(
+        "dump", help="Dump stored data to stdout", description=DUMP_PARSER_HELP)
+    dump_parser.add_argument(
+        "-p", "--pattern", default="", type=str, help=DUMP_PATTERN_HELP)
+    dump_parser.add_argument(
+        "-o", "--outfile", default="", type=str, help=DUMP_OUTFILE_HELP)
     add_default_args(dump_parser)
     add_default_file_args(dump_parser)
     dump_parser.set_defaults(func=dump_worker)
@@ -651,7 +685,8 @@ def create_plot_parser(parent_parser):
         return
     plot_parser = parent_parser.add_parser("plot", help="Create basic plots of stored data.",
                                            description=PLOT_PARSER_HELP)
-    plot_parser.add_argument("-p", "--pattern", type=str, help=DATA_PATTERN_HELP)
+    plot_parser.add_argument(
+        "-p", "--pattern", type=str, help=DATA_PATTERN_HELP)
     add_default_args(plot_parser)
     add_default_file_args(plot_parser)
     plot_parser.set_defaults(func=plot_worker)
@@ -662,7 +697,7 @@ def create_file_parser(parent_parser):
                                            description=("Quick display of file information such as creation date, "
                                                         "file size and structure etc."))
     file_parser.add_argument("-v", "--verbosity", action="count",
-                             help=("increase output verbosity, use -v, -vv, -vvv for more verbose output"))
+                             help="increase output verbosity, use -v, -vv, -vvv for more verbose output")
     add_default_file_args(file_parser)
     file_parser.set_defaults(func=file_worker)

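For readers unfamiliar with how the create_*_parser helpers above fit together, here is a small, self-contained sketch of the same argparse subparser pattern. It is not the actual module: the worker body and the example file name ("recording.nix") are hypothetical stand-ins, while the argument names mirror the hunks.

    # illustration of the subparser pattern used by the create_*_parser helpers
    import argparse


    def add_default_file_args(parent_parser):
        parent_parser.add_argument("file", type=str, nargs="+",
                                   help="Path to file (at least one)")
        parent_parser.add_argument("-s", "--suffix", type=str, default="nix", nargs="?",
                                   help="The file suffix used for nix data files (default: %(default)s).")


    def dump_worker(arguments):
        # stand-in for the real worker; just echo what would be dumped
        print("would dump:", arguments.file, "pattern:", arguments.pattern)


    parser = argparse.ArgumentParser(prog="explore-demo")
    subparsers = parser.add_subparsers()
    dump_parser = subparsers.add_parser("dump", help="Dump stored data to stdout")
    dump_parser.add_argument("-p", "--pattern", default="", type=str,
                             help="entity name or type pattern")
    add_default_file_args(dump_parser)
    dump_parser.set_defaults(func=dump_worker)

    # each subparser carries the shared file arguments plus its own options
    args = parser.parse_args(["dump", "recording.nix"])
    args.func(args)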