Skip to content

Commit 78ef8ea

Browse files
committed
merge
2 parents 207e1e8 + efc8080 commit 78ef8ea

File tree

7 files changed

+162
-14
lines changed

7 files changed

+162
-14
lines changed
Lines changed: 110 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,110 @@
1+
name: Create caches for ephy_testing_data and conda env
2+
3+
on:
4+
workflow_dispatch: # Workflow can be triggered manually via the GH Actions web interface
5+
push: # When something is pushed into master this checks if caches need to be re-created
6+
branches:
7+
- master
8+
schedule:
9+
- cron: "0 12 * * *" # Daily at noon UTC
10+
11+
jobs:
12+
13+
create-conda-env-cache-if-missing:
14+
name: Caching conda env
15+
runs-on: "ubuntu-latest"
16+
strategy:
17+
fail-fast: true
18+
defaults:
19+
# by default run in bash mode (required for conda usage)
20+
run:
21+
shell: bash -l {0}
22+
steps:
23+
- uses: actions/checkout@v3
24+
25+
- name: Get current year-month
26+
id: date
27+
run: |
28+
echo "date=$(date +'%Y-%m')" >> $GITHUB_OUTPUT
29+
30+
- name: Get current dependencies hash
31+
id: dependencies
32+
run: |
33+
echo "hash=${{hashFiles('**/pyproject.toml', '**/environment_testing.yml')}}" >> $GITHUB_OUTPUT
34+
35+
- uses: actions/cache@v3
36+
# the cache for python package is reset:
37+
# * every month
38+
# * when package dependencies change
39+
id: cache-conda-env
40+
with:
41+
path: /usr/share/miniconda/envs/neo-test-env
42+
key: ${{ runner.os }}-conda-env-${{ steps.dependencies.outputs.hash }}-${{ steps.date.outputs.date }}
43+
44+
- name: Cache found?
45+
run: echo "Cache-hit == ${{steps.cache-conda-env.outputs.cache-hit == 'true'}}"
46+
47+
# activate environment if not restored from cache
48+
- uses: conda-incubator/setup-miniconda@v2
49+
if: steps.cache-conda-env.outputs.cache-hit != 'true'
50+
with:
51+
activate-environment: neo-test-env
52+
python-version: 3.9
53+
54+
- name: Create the conda environment to be cached
55+
if: steps.cache-conda-env.outputs.cache-hit != 'true'
56+
# create conda env, configure git and install pip, neo and test dependencies from master
57+
# for PRs that change dependencies, this environment will be updated in the test workflow
58+
run: |
59+
conda env update neo-test-env --file environment_testing.yml
60+
git config --global user.email "neo_ci@fake_mail.com"
61+
git config --global user.name "neo CI"
62+
python -m pip install -U pip # Official recommended way
63+
pip install --upgrade -e .
64+
pip install .[test]
65+
66+
create-data-cache-if-missing:
67+
name: Caching data env
68+
runs-on: "ubuntu-latest"
69+
steps:
70+
71+
- name: Get current hash (SHA) of the ephy_testing_data repo
72+
id: ephy_testing_data
73+
run: |
74+
echo "dataset_hash=$(git ls-remote https://gin.g-node.org/NeuralEnsemble/ephy_testing_data.git HEAD | cut -f1)" >> $GITHUB_OUTPUT
75+
76+
- uses: actions/cache@v3
77+
# Loading cache of ephys_testing_dataset
78+
id: cache-datasets
79+
with:
80+
path: ~/ephy_testing_data
81+
key: ${{ runner.os }}-datasets-${{ steps.ephy_testing_data.outputs.dataset_hash }}
82+
83+
- name: Cache found?
84+
run: echo "Cache-hit == ${{steps.cache-datasets.outputs.cache-hit == 'true'}}"
85+
86+
- name: Installing datalad and git-annex
87+
if: steps.cache-datasets.outputs.cache-hit != 'true'
88+
run: |
89+
git config --global user.email "neo_ci@fake_mail.com"
90+
git config --global user.name "neo CI"
91+
python -m pip install -U pip # Official recommended way
92+
pip install datalad-installer
93+
datalad-installer --sudo ok git-annex --method datalad/packages
94+
pip install datalad
95+
git config --global filter.annex.process "git-annex filter-process" # recommended for efficiency
96+
97+
- name: Download dataset
98+
if: steps.cache-datasets.outputs.cache-hit != 'true'
99+
# Download repository and also fetch data
100+
run: |
101+
cd ~
102+
datalad install --recursive --get-data https://gin.g-node.org/NeuralEnsemble/ephy_testing_data
103+
104+
- name: Show size of the cache to assert data is downloaded
105+
run: |
106+
cd ~
107+
pwd
108+
du -hs ephy_testing_data
109+
cd ephy_testing_data
110+
pwd

.github/workflows/core-test.yml

Lines changed: 7 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -4,11 +4,18 @@ on:
44
pull_request:
55
branches: [master]
66
types: [synchronize, opened, reopened, ready_for_review]
7+
paths:
8+
- 'neo/core/**'
9+
- 'pyproject.toml'
710

811
# run checks on any change of master, including merge of PRs
912
push:
1013
branches: [master]
1114

15+
concurrency: # Cancel previous workflows on the same pull request
16+
group: ${{ github.workflow }}-${{ github.ref }}
17+
cancel-in-progress: true
18+
1219
jobs:
1320
multi-os-python-numpy:
1421
runs-on: ${{ matrix.os }}

.github/workflows/io-test.yml

Lines changed: 32 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -9,6 +9,9 @@ on:
99
push:
1010
branches: [master]
1111

12+
concurrency: # Cancel previous workflows on the same pull request
13+
group: ${{ github.workflow }}-${{ github.ref }}
14+
cancel-in-progress: true
1215

1316
jobs:
1417
build-and-test:
@@ -35,34 +38,44 @@ jobs:
3538

3639
- name: Get ephy_testing_data current head hash
3740
# the key depend on the last commit repo https://gin.g-node.org/NeuralEnsemble/ephy_testing_data.git
38-
id: ephy_testing_data_hash
41+
id: ephy_testing_data
3942
run: |
40-
echo "latest_hash=$(git ls-remote https://gin.g-node.org/NeuralEnsemble/ephy_testing_data.git HEAD | cut -f1)" >> $GITHUB_OUTPUT
43+
echo "dataset_hash=$(git ls-remote https://gin.g-node.org/NeuralEnsemble/ephy_testing_data.git HEAD | cut -f1)" >> $GITHUB_OUTPUT
4144
42-
- uses: actions/cache@v3
45+
- uses: actions/cache/restore@v3
4346
# Loading cache of ephys_testing_dataset
4447
id: cache-datasets
4548
with:
4649
path: ~/ephy_testing_data
47-
key: ${{ runner.os }}-datasets-${{ steps.ephy_testing_data_hash.outputs.latest_hash }}
50+
key: ${{ runner.os }}-datasets-${{ steps.ephy_testing_data.outputs.dataset_hash }}
51+
restore-keys: ${{ runner.os }}-datasets-
4852

4953
- uses: conda-incubator/setup-miniconda@v2
5054
with:
5155
activate-environment: neo-test-env
5256
python-version: ${{ matrix.python-version }}
53-
clean-patched-environment-file: false
5457

55-
- uses: actions/cache@v3
58+
- name: Get current dependencies hash
59+
id: dependencies
60+
run: |
61+
echo "hash=${{hashFiles('**/pyproject.toml', '**/environment_testing.yml')}}" >> $GITHUB_OUTPUT
62+
63+
- uses: actions/cache/restore@v3
5664
# the cache for python package is reset:
5765
# * every month
58-
# * when requirements/requirements_testing change
66+
# * when package dependencies change
5967
id: cache-conda-env
6068
with:
6169
path: /usr/share/miniconda/envs/neo-test-env
62-
key: ${{ runner.os }}-conda-env-${{ hashFiles('**/pyproject.toml') }}-${{ steps.date.outputs.date }}
70+
key: ${{ runner.os }}-conda-env-${{ steps.dependencies.outputs.hash }}-${{ steps.date.outputs.date }}
71+
# restore-keys match any key that starts with the restore-key
72+
restore-keys: |
73+
${{ runner.os }}-conda-env-${{ steps.dependencies.outputs.hash }}-
74+
${{ runner.os }}-conda-env-
6375
6476
- name: Install testing dependencies
65-
# testing environment is only installed if no cache was found
77+
# testing environment is only created from yml if no cache was found
78+
# restore-key hits should result in `cache-hit` == 'false'
6679
if: steps.cache-conda-env.outputs.cache-hit != 'true'
6780
run: |
6881
conda env update neo-test-env --file environment_testing.yml
@@ -72,11 +85,20 @@ jobs:
7285
git config --global user.email "neo_ci@fake_mail.com"
7386
git config --global user.name "neo CI"
7487
75-
- name: Install neo
88+
- name: Install neo including dependencies
89+
# installation with dependencies is only required if no cache was found
90+
# restore-key hits should result in `cache-hit` == 'false'
91+
if: steps.cache-conda-env.outputs.cache-hit != 'true'
7692
run: |
7793
pip install --upgrade -e .
7894
pip install .[test]
7995
96+
- name: Install neo without dependencies
97+
# only installing neo version to test as dependencies should be in cached conda env already
98+
if: steps.cache-conda-env.outputs.cache-hit == 'true'
99+
run: |
100+
pip install --no-dependencies -e .
101+
80102
- name: Test with pytest
81103
run: |
82104
# only neo.rawio and neo.io

neo/io/nixio.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -630,7 +630,7 @@ def write_block(self, block, use_obj_names=False):
630630
metadata["neo_name"] = neoname
631631
nixblock.definition = block.description
632632
if block.rec_datetime:
633-
nix_rec_dt = int(block.rec_datetime.strftime("%s"))
633+
nix_rec_dt = int(block.rec_datetime.timestamp())
634634
nixblock.force_created_at(nix_rec_dt)
635635
if block.file_datetime:
636636
fdt, annotype = dt_to_nix(block.file_datetime)

neo/rawio/neuralynxrawio/ncssections.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -438,7 +438,7 @@ def build_for_ncs_file(ncsMemMap, nlxHdr):
438438

439439
# digital lynx style with fractional frequency and micros per samp determined from
440440
# block times
441-
elif acqType == "DIGITALLYNX" or acqType == "DIGITALLYNXSX" or acqType == 'CHEETAH64':
441+
elif acqType == "DIGITALLYNX" or acqType == "DIGITALLYNXSX" or acqType == 'CHEETAH64' or acqType == 'RAWDATAFILE':
442442
nomFreq = nlxHdr['sampling_rate']
443443
nb = NcsSectionsFactory._buildForMaxGap(ncsMemMap, nomFreq)
444444

neo/rawio/neuralynxrawio/nlxheader.py

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -293,6 +293,10 @@ def type_of_recording(self):
293293
# Cheetah64
294294
elif self['HardwareSubSystemType'] == 'Cheetah64':
295295
return 'CHEETAH64'
296+
297+
# RawDataFile
298+
elif self['HardwareSubSystemType'] == 'RawDataFile':
299+
return 'RAWDATAFILE'
296300

297301
else:
298302
return 'UNKNOWN'

neo/rawio/spikeglxrawio.py

Lines changed: 7 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -40,6 +40,11 @@
4040
imDatPrb_type=1 (NP 1.0)
4141
imDatPrb_type=21 (NP 2.0, single multiplexed shank)
4242
imDatPrb_type=24 (NP 2.0, 4-shank)
43+
imDatPrb_type=1030 (NP 1.0-NHP 45mm SOI90 - NHP long 90um wide, staggered contacts)
44+
imDatPrb_type=1031 (NP 1.0-NHP 45mm SOI125 - NHP long 125um wide, staggered contacts)
45+
imDatPrb_type=1032 (NP 1.0-NHP 45mm SOI115 / 125 linear - NHP long 125um wide, linear contacts)
46+
imDatPrb_type=1022 (NP 1.0-NHP 25mm - NHP medium)
47+
imDatPrb_type=1015 (NP 1.0-NHP 10mm - NHP short)
4348
4449
Author : Samuel Garcia
4550
Some functions are copied from Graham Findlay
@@ -380,7 +385,7 @@ def extract_stream_info(meta_file, meta):
380385
# metad['imroTbl'] contain two gain per channel AP and LF
381386
# except for the last fake channel
382387
per_channel_gain = np.ones(num_chan, dtype='float64')
383-
if 'imDatPrb_type' not in meta or meta['imDatPrb_type'] == '0':
388+
if 'imDatPrb_type' not in meta or meta['imDatPrb_type'] == '0' or meta['imDatPrb_type'] in ('1015', '1022', '1030', '1031', '1032'):
384389
# This work with NP 1.0 case with different metadata versions
385390
# https://github.com/billkarsh/SpikeGLX/blob/gh-pages/Support/Metadata_3A.md#imec
386391
# https://github.com/billkarsh/SpikeGLX/blob/gh-pages/Support/Metadata_3B1.md#imec
@@ -404,7 +409,7 @@ def extract_stream_info(meta_file, meta):
404409
channel_gains = gain_factor * per_channel_gain * 1e6
405410
else:
406411
raise NotImplementedError('This meta file version of spikeglx'
407-
'is not implemented')
412+
' is not implemented')
408413
else:
409414
stream_kind = ''
410415
stream_name = device

0 commit comments

Comments
 (0)