Commit 4763c49

Merge pull request #1461 from NeuralEnsemble/black-formatting
Black formatting
2 parents e9b57b0 + b84a1d8 commit 4763c49

File tree: 10 files changed (+41, -31 lines)

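The hunks below are mechanical reformatting produced by the Black formatter: quotes normalized to double quotes, spaces added after commas and around complex slice colons, two spaces enforced before inline comments, blank lines added after module docstrings and guarded imports and before top-level definitions, over-long calls wrapped, and trailing whitespace stripped. Where a removed line and the added line replacing it read identically below, the difference is whitespace only (typically trailing spaces). As a rough illustration of the same transformation, not the exact command or configuration used for this PR, Black's Python API can be applied to one of the lines touched in neo/rawio/axonrawio.py:

# Illustrative sketch only: reproduce the kind of reformatting in this PR with
# Black's Python API. The Mode settings are an assumption; the exact command
# and configuration used for the actual commit are not shown on this page.
import black

src = "indexed_string = big_string[big_string.rfind(b'\\x00\\x00'):]\n"
formatted = black.format_str(src, mode=black.Mode())
print(formatted)
# Expected style of output (double quotes, spaced slice colon):
# indexed_string = big_string[big_string.rfind(b"\x00\x00") :]

The same normalization applied repository-wide yields the whitespace- and quote-only changes listed file by file below.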

examples/plot_igorio.py

Lines changed: 3 additions & 2 deletions
@@ -3,6 +3,7 @@
 ===========================
 
 """
+
 ###########################################################
 # Import our packages
 import os
@@ -28,7 +29,7 @@
 zip_ref.close()
 
 ######################################################
-# Once we have our data we can use `get_io` to find an
+# Once we have our data we can use `get_io` to find an
 # io (Igor in this case). Then we read the analogsignals
 # Finally we will make some nice plots
 reader = get_io(filename)
@@ -37,4 +38,4 @@
 plt.xlabel(signal.sampling_period.dimensionality)
 plt.ylabel(signal.dimensionality)
 
-plt.show()
+plt.show()

examples/plot_imageseq.py

Lines changed: 3 additions & 3 deletions
@@ -17,7 +17,7 @@
 
 ############################################################
 # Now we need to generate some data
-# We will just make a nice box and then we can attach this
+# We will just make a nice box and then we can attach this
 # ImageSequence to a variety of ROIs
 # our ImageSequence will be 50 frames of 100x100 pixel images
 
@@ -36,9 +36,9 @@
 image_seq = ImageSequence(l, sampling_rate=500 * pq.Hz, spatial_scale="m", units="V")
 
 result = image_seq.signal_from_region(
-    CircularRegionOfInterest(image_seq,50, 50, 25),
+    CircularRegionOfInterest(image_seq, 50, 50, 25),
     CircularRegionOfInterest(image_seq, 10, 10, 5),
-    PolygonRegionOfInterest(image_seq,(50, 25), (50, 45), (14, 65), (90, 80)),
+    PolygonRegionOfInterest(image_seq, (50, 25), (50, 45), (14, 65), (90, 80)),
 )
 
 ###############################################################

examples/plot_multi_tetrode_example.py

Lines changed: 3 additions & 2 deletions
@@ -2,6 +2,7 @@
 Analyzing and Plotting Data with Neo Structures
 ===============================================
 """
+
 ######################################################
 # First we import some packages. Since we are making simulated
 # data we will import quite a few neo features as well as use
@@ -31,7 +32,7 @@
 
 ##################################################################################
 # Neo can also have groups. Groups are structures within a block that can cross segments
-# for example we could group a neuron across trials or across probes.
+# for example we could group a neuron across trials or across probes.
 
 # Create a group for each neuron, annotate each group with the tetrode from which it was recorded
 groups = []
@@ -86,7 +87,7 @@
 # since its data can be treated like numpy arrays
 # it is easy to use standard packages like matplotlib
 # for all your plotting needs
-# We do a classic in neuroscience and show various ways
+# We do a classic in neuroscience and show various ways
 # to plot a PSTH (Peristimulus histogram)
 
 ###################################################

examples/plot_read_files_neo_io.py

Lines changed: 2 additions & 3 deletions
@@ -19,7 +19,6 @@
 urllib.request.urlretrieve(distantfile, localfile)
 
 
-
 ###################################################
 # Now we can create our reader and read some data
 
@@ -31,7 +30,7 @@
 
 ######################################################
 # Once we have our blocks we can iterate through each
-# block of data and see the contents of all parts of
+# block of data and see the contents of all parts of
 # that data
 
 # access to segments
@@ -45,7 +44,7 @@
 
 #######################################################
 # Let's look at another file type
-
+
 # CED Spike2 files
 distantfile = url_repo + "spike2/File_spike2_1.smr"
 localfile = "./File_spike2_1.smr"

examples/plot_read_files_neo_rawio.py

Lines changed: 6 additions & 5 deletions
@@ -4,6 +4,7 @@
 
 compare with read_files_neo_io.py
 """
+
 ###########################################################
 # First we import a RawIO from neo.rawio
 # For this example we will use PlexonRawIO
@@ -42,9 +43,9 @@
 ###############################################################
 # Read signal chunks
 # This is how we read raw data. We choose indices that we want or
-# we can use None to mean look at all channels. We also need to
+# we can use None to mean look at all channels. We also need to
 # specify the block of data (block_index) as well as the segment
-# (seg_index). Then we give the index start and stop. Since we
+# (seg_index). Then we give the index start and stop. Since we
 # often think in time: to go from time to index would just require
 # the sample rate (so index = time / sampling_rate)
 
@@ -82,7 +83,7 @@
 
 # Count units and spikes per unit
 nb_unit = reader.spike_channels_count()
-print(f"nb_unit: {nb_unit}\n") # nb_unit stands for number of units
+print(f"nb_unit: {nb_unit}\n")  # nb_unit stands for number of units
 print("spike_channel_index nb_spike")
 for spike_channel_index in range(nb_unit):
     nb_spike = reader.spike_count(block_index=0, seg_index=0, spike_channel_index=spike_channel_index)
@@ -99,7 +100,7 @@
 
 #######################################################################
 # Some file formats can also give waveform information. We are lucky
-# again our file has waveform data!! We forms are a 3d dataset of
+# again our file has waveform data!! We forms are a 3d dataset of
 # (nb_spike, nb_channel, nb_sample)
 
 # Read spike waveforms
@@ -134,7 +135,7 @@
 
 nb_event_channel = reader.event_channels_count()
 print(f"nb_event_channel: {nb_event_channel}")
-# now iterate through the channels
+# now iterate through the channels
 for chan_index in range(nb_event_channel):
     nb_event = reader.event_count(block_index=0, seg_index=0, event_channel_index=chan_index)
     print(f"chan_index: {chan_index} nb_event: {nb_event}\n")

examples/plot_read_proxy_with_lazy_load.py

Lines changed: 12 additions & 10 deletions
@@ -39,7 +39,7 @@
 
 ############################################################
 # as always we can look view some interesting information about the
-# metadata and structure of a file just by printing the reader and
+# metadata and structure of a file just by printing the reader and
 # it's header
 print(reader)
 print(f"Header information: {reader.header}")
@@ -49,31 +49,33 @@
 # Now let's make a function that we want to apply to
 # look at lazy vs eager uses of the API
 
+
 def apply_my_fancy_average(sig_list):
     """basic average along triggers and then channels
     here we go back to numpy with magnitude
     to be able to use np.stack.
-
+
     Because neo uses quantities to keep track of units
-    we can always get just the magnitude of an array
+    we can always get just the magnitude of an array
     with `.magnitude`
     """
     sig_list = [s.magnitude for s in sig_list]
     sigs = np.stack(sig_list, axis=0)
     return np.mean(np.mean(sigs, axis=0), axis=1)
 
+
 #################################################
 # Let's set our limits for both cases. We will
-# use quantities to include time dimensions.
+# use quantities to include time dimensions.
 
-lim_start = -20 * pq.ms # 20 milliseconds before
-lim_end = +20 * pq.ms # 20 milliseconds after
+lim_start = -20 * pq.ms  # 20 milliseconds before
+lim_end = +20 * pq.ms  # 20 milliseconds after
 
 ##################################################
 # We start with eager (where `lazy=False`.) Everything
 # is loaded into memory. We will read a segment of data.
-# This includes analog signal data and events data
-# (final contents of a segment are dependent on the
+# This includes analog signal data and events data
+# (final contents of a segment are dependent on the
 # underlying IO being used)
 
 
@@ -86,7 +88,7 @@ def apply_my_fancy_average(sig_list):
     anasig_chunk = anasig.time_slice(t0, t1)
     all_sig_chunks.append(anasig_chunk)
 
-# After pulling all data into memory and then iterating through triggers
+# After pulling all data into memory and then iterating through triggers
 # we end by doing our average
 m1 = apply_my_fancy_average(all_sig_chunks)
 
@@ -111,7 +113,7 @@ def apply_my_fancy_average(sig_list):
 
 m2 = apply_my_fancy_average(all_sig_chunks)
 ##########################################################
-# We see that either way the result is the same, but
+# We see that either way the result is the same, but
 # we do not exhaust our RAM/memory
 print(f"Eagerly loading data and averaging: {m1}")
 print(f"Lazy loading data and average {m2}")

neo/rawio/axonrawio.py

Lines changed: 3 additions & 3 deletions
@@ -481,12 +481,12 @@ def parse_axon_soup(filename):
     # brittle. pyABF believes that looking for the \x00\x00 is more
     # robust. We find these values, replace mu->u, then split into
     # a set of strings
-    indexed_string = big_string[big_string.rfind(b'\x00\x00'):]
+    indexed_string = big_string[big_string.rfind(b"\x00\x00") :]
     # replace mu -> u for easy display
-    indexed_string = indexed_string.replace(b'\xb5', b'\x75')
+    indexed_string = indexed_string.replace(b"\xb5", b"\x75")
     # we need to remove one of the \x00 to have the indices be
     # the correct order
-    indexed_string = indexed_string.split(b'\x00')[1:]
+    indexed_string = indexed_string.split(b"\x00")[1:]
     strings = indexed_string
 
     # ADC sections
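For a byte-handling routine like parse_axon_soup, the only real question about a formatting-only hunk is whether the renormalized literals still denote the same bytes. A minimal check, with a made-up big_string standing in for the real header blob (names mirror the hunk; this is illustrative, not the module's own test):

# Quote style does not change byte values.
assert b'\x00\x00' == b"\x00\x00"
assert b'\xb5' == b"\xb5" and b'\x75' == b"\x75" == b"u"

# Walk the reformatted lines on a toy header blob (big_string here is invented).
big_string = b"header\x00\x00mu: \xb5V\x00done"
indexed_string = big_string[big_string.rfind(b"\x00\x00") :]
indexed_string = indexed_string.replace(b"\xb5", b"\x75")
print(indexed_string.split(b"\x00")[1:])  # [b'', b'mu: uV', b'done']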

neo/rawio/spikegadgetsrawio.py

Lines changed: 3 additions & 1 deletion
@@ -177,7 +177,9 @@ def _parse_header(self):
         chan_ind = 0
         self.is_scaleable = "spikeScalingToUv" in sconf[0].attrib
         if not self.is_scaleable:
-            self.logger.warning("Unable to read channel gain scaling (to uV) from .rec header. Data has no physical units!")
+            self.logger.warning(
+                "Unable to read channel gain scaling (to uV) from .rec header. Data has no physical units!"
+            )
 
         for trode in sconf:
             if "spikeScalingToUv" in trode.attrib:

neo/test/iotest/test_get_io.py

Lines changed: 5 additions & 2 deletions
@@ -3,8 +3,10 @@
 import platform
 from neo.io import get_io, list_candidate_ios, NixIO
 import pytest
+
 try:
     import nixio
+
     HAVE_NIX = True
 except:
     HAVE_NIX = False
@@ -35,7 +37,8 @@ def test_list_candidate_ios_filename_stub():
 
     assert NixIO in ios
 
-@pytest.mark.skipif(not HAVE_NIX or platform.system()=='Windows', reason='Need nixio in order to return NixIO class')
+
+@pytest.mark.skipif(not HAVE_NIX or platform.system() == "Windows", reason="Need nixio in order to return NixIO class")
 def test_get_io_non_existant_file_writable_io():
     # use nixio for testing with writable io
     non_existant_file = Path("non_existant_file.nix")
@@ -45,4 +48,4 @@ def test_get_io_non_existant_file_writable_io():
     assert isinstance(io, NixIO)
 
     # cleanup
-    non_existant_file.unlink(missing_ok=True) # cleanup will fail on Windows so need to skip
+    non_existant_file.unlink(missing_ok=True)  # cleanup will fail on Windows so need to skip

neo/test/rawiotest/tools.py

Lines changed: 1 addition & 0 deletions
@@ -17,6 +17,7 @@ def can_use_network():
         return False
     try:
         import datalad
+
         HAVE_DATALAD = True
     except:
         HAVE_DATALAD = False
