
Commit 9d251f0

black formatting (#1781)
Co-authored-by: apdavison <[email protected]>
1 parent 72ec76a commit 9d251f0

File tree

1 file changed (+20, -15 lines)


neo/rawio/blackrockrawio.py

Lines changed: 20 additions & 15 deletions
@@ -331,17 +331,16 @@ def _parse_header(self):
            # read nsx headers
            nsx_header_reader = self._nsx_header_reader[spec_version]
            self._nsx_basic_header[nsx_nb], self._nsx_ext_header[nsx_nb] = nsx_header_reader(nsx_nb)
-
+
            # The Blackrock defines period as the number of 1/30_000 seconds between data points
            # E.g. it is 1 for 30_000, 3 for 10_000, etc
            nsx_period = self._nsx_basic_header[nsx_nb]["period"]
            sampling_rate = 30_000.0 / nsx_period
            self._nsx_sampling_frequency[nsx_nb] = float(sampling_rate)

-
        # Parase data packages
        for nsx_nb in self._avail_nsx:
-
+
            # The only way to know if it is the Precision Time Protocol of file spec 3.0
            # is to check for nanosecond timestamp resolution.
            is_ptp_variant = (
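The comment in this hunk pins down the period-to-rate relationship: `period` counts ticks of a 30 kHz reference clock between samples. A minimal standalone sketch of that conversion (the period values are illustrative, not read from a real .nsx file):

# Minimal sketch of the period -> sampling-rate conversion described in the hunk above.
# The period values are hypothetical examples, not taken from an actual file header.
NSX_CLOCK_HZ = 30_000.0  # Blackrock reference clock, ticks per second

for period in (1, 3, 30):
    sampling_rate = NSX_CLOCK_HZ / period
    print(f"period={period:>2} -> {sampling_rate:>7.0f} Hz")
# period= 1 ->   30000 Hz, period= 3 ->   10000 Hz, period=30 ->    1000 Hz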
@@ -399,7 +398,9 @@ def _parse_header(self):

        self.nsx_datas = {}
        # Keep public attribute for backward compatibility but let's use the private one and maybe deprecate this at some point
-        self.sig_sampling_rates = {nsx_number: self._nsx_sampling_frequency[nsx_number] for nsx_number in self.nsx_to_load}
+        self.sig_sampling_rates = {
+            nsx_number: self._nsx_sampling_frequency[nsx_number] for nsx_number in self.nsx_to_load
+        }
        if len(self.nsx_to_load) > 0:
            for nsx_nb in self.nsx_to_load:
                basic_header = self._nsx_basic_header[nsx_nb]
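For context, the reformatted comprehension still produces the same public mapping from nsx number to sampling frequency; a toy illustration with made-up nsx numbers and rates:

# Hypothetical illustration of the backward-compatible sig_sampling_rates mapping;
# the nsx numbers and frequencies below are invented for the example.
_nsx_sampling_frequency = {2: 1_000.0, 5: 30_000.0}
nsx_to_load = [2, 5]
sig_sampling_rates = {nsx_number: _nsx_sampling_frequency[nsx_number] for nsx_number in nsx_to_load}
print(sig_sampling_rates)  # {2: 1000.0, 5: 30000.0}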
@@ -1072,7 +1073,6 @@ def _read_nsx_dataheader_spec_v30_ptp(
            # some packets have more than 1 sample. Not actually ptp. Revert to non-ptp variant.
            return self._read_nsx_dataheader_spec_v22_30(nsx_nb, filesize=filesize, offset=header_size)

-
        # Segment data, at the moment, we segment, where the data has gaps that are longer
        # than twice the sampling period.
        sampling_rate = self._nsx_sampling_frequency[nsx_nb]
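The segmentation threshold referred to here is twice the sampling period; a quick numeric check under assumed sampling rates:

# Quick check of the "twice the sampling period" threshold mentioned in the comment above.
# The sampling rates are assumed values for illustration.
for sampling_rate in (30_000.0, 10_000.0, 1_000.0):
    segmentation_threshold = 2.0 / sampling_rate  # seconds
    print(f"{sampling_rate:>8.0f} Hz -> gap threshold {segmentation_threshold * 1000:.3f} ms")
# 30 kHz -> 0.067 ms, 10 kHz -> 0.200 ms, 1 kHz -> 2.000 ms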
@@ -1081,29 +1081,32 @@ def _read_nsx_dataheader_spec_v30_ptp(
        # The raw timestamps are the indices of an ideal clock that ticks at `timestamp_resolution` times per second.
        # We convert this indices to actual timestamps in seconds
        raw_timestamps = struct_arr["timestamps"]
-        timestamps_sampling_rate = self._nsx_basic_header[nsx_nb]["timestamp_resolution"]  # clocks per sec uint64 or uint32
+        timestamps_sampling_rate = self._nsx_basic_header[nsx_nb][
+            "timestamp_resolution"
+        ]  # clocks per sec uint64 or uint32
        timestamps_in_seconds = raw_timestamps / timestamps_sampling_rate

        time_differences = np.diff(timestamps_in_seconds)
        gap_indices = np.argwhere(time_differences > segmentation_threshold).flatten()
        segment_starts = np.hstack((0, 1 + gap_indices))
-
+
        # Report gaps if any are found
        if len(gap_indices) > 0:
            import warnings
+
            threshold_ms = segmentation_threshold * 1000
-
+
            # Calculate all gap details in vectorized operations
            gap_durations_seconds = time_differences[gap_indices]
            gap_durations_ms = gap_durations_seconds * 1000
            gap_positions_seconds = timestamps_in_seconds[gap_indices] - timestamps_in_seconds[0]
-
+
            # Build gap detail lines all at once
            gap_detail_lines = [
                f"| {index:>15,} | {pos:>21.6f} | {dur:>21.3f} |\n"
                for index, pos, dur in zip(gap_indices, gap_positions_seconds, gap_durations_ms)
            ]
-
+
            segmentation_report_message = (
                f"\nFound {len(gap_indices)} gaps for nsx {nsx_nb} where samples are farther apart than {threshold_ms:.3f} ms.\n"
                f"Data will be segmented at these locations to create {len(segment_starts)} segments.\n\n"
@@ -1112,15 +1115,17 @@ def _read_nsx_dataheader_spec_v30_ptp(
                "| Sample Index | Sample at | Gap Jump |\n"
                "| | (Seconds) | (Milliseconds) |\n"
                "+-----------------+-----------------------+-----------------------+\n"
-                + ''.join(gap_detail_lines) +
-                "+-----------------+-----------------------+-----------------------+\n"
+                + "".join(gap_detail_lines)
+                + "+-----------------+-----------------------+-----------------------+\n"
            )
            warnings.warn(segmentation_report_message)
-
+
        # Calculate all segment boundaries and derived values in one operation
        segment_boundaries = list(segment_starts) + [len(struct_arr) - 1]
-        segment_num_data_points = [segment_boundaries[i+1] - segment_boundaries[i] for i in range(len(segment_starts))]
-
+        segment_num_data_points = [
+            segment_boundaries[i + 1] - segment_boundaries[i] for i in range(len(segment_starts))
+        ]
+
        size_of_data_block = struct_arr.dtype.itemsize
        segment_offsets = [header_size + pos * size_of_data_block for pos in segment_starts]
