
Commit 296bbb6

Update Dev Branch with the latest changes from main (#157)
1 parent d2a0986 commit 296bbb6

6 files changed: +89 −0 lines changed
Lines changed: 33 additions & 0 deletions
@@ -0,0 +1,33 @@
+name: 'Prepare Datasets'
+description: 'Restores data from caches or downloads it from Google Drive.'
+inputs:
+  rclone-config:
+    description: 'Rclone configuration'
+    required: true
+runs:
+  using: 'composite'
+  steps:
+    - name: Setup Rclone
+      uses: AnimMouse/setup-rclone@v1
+      with:
+        rclone_config: ${{ inputs.rclone-config }}
+
+    - name: Get dataset version hash
+      shell: bash
+      run: |
+        HASH=$(rclone lsl remote:"SampleData" --drive-shared-with-me)
+        echo "DATASET_HASH=$HASH" >> $GITHUB_OUTPUT
+
+    - name: Cache datasets
+      uses: actions/cache@v4
+      id: cache-datasets
+      with:
+        path: ./testing_data
+        key: ephys-datasets-${{ steps.ephys.outputs.DATASET_HASH }}
+        enableCrossOsArchive: true
+
+    - if: ${{ steps.cache-datasets.outputs.cache-hit != 'true' }}
+      name: Download datasets from Google Drive
+      shell: bash
+      run: |
+        rclone copy remote:"SampleData" ./testing_data --drive-shared-with-me
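
The commit adds this file as a reusable composite action (its repository path is not shown in this extract). As a rough sketch of how a workflow job would consume it, assuming a path such as .github/actions/prepare-datasets and a secret named RCLONE_CONFIG (both assumptions, not part of the commit):

# Hypothetical caller job; the action path and secret name are assumptions.
jobs:
  tests:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - name: Prepare Datasets
        uses: ./.github/actions/prepare-datasets
        with:
          rclone-config: ${{ secrets.RCLONE_CONFIG }}

Because the actions/cache step sits inside the composite action, the Google Drive download step only runs when the cache key misses.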

.github/workflows/pr-tests.yml

Lines changed: 13 additions & 0 deletions
@@ -0,0 +1,13 @@
+name: PR Tests
+
+on:
+  pull_request:
+    types: [synchronize, opened, reopened, ready_for_review]
+    # Synchronize, opened and reopened are the default types for pull_request.
+    # We add ready_for_review to trigger the changelog check and full tests when "Ready for review" is clicked.
+  merge_group:
+  workflow_dispatch:
+
+concurrency:
+  group: ${{ github.workflow }}-${{ github.ref }}
+  cancel-in-progress: true

.github/workflows/run-tests.yml

Lines changed: 14 additions & 0 deletions
@@ -0,0 +1,14 @@
+name: Run Tests
+
+on:
+  workflow_call:
+    inputs:
+      python-versions:
+        description: 'List of Python versions to use in matrix, as JSON string'
+        required: true
+        type: string
+      os-versions:
+        description: 'List of OS versions to use in matrix, as JSON string'
+        required: true
+        type: string
+  workflow_dispatch:
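
The jobs that consume these workflow_call inputs are not part of this diff. A minimal sketch of the intended wiring, assuming pr-tests.yml is the caller and that the JSON strings are expanded with fromJSON inside run-tests.yml (job names and version lists below are illustrative only):

# Hypothetical caller job, e.g. in pr-tests.yml.
jobs:
  run-tests:
    uses: ./.github/workflows/run-tests.yml
    with:
      python-versions: '["3.10", "3.11", "3.12"]'
      os-versions: '["ubuntu-latest", "windows-latest", "macos-latest"]'

# Hypothetical matrix job inside run-tests.yml, expanding the JSON-string inputs.
jobs:
  test:
    strategy:
      fail-fast: false
      matrix:
        os: ${{ fromJSON(inputs.os-versions) }}
        python-version: ${{ fromJSON(inputs.python-versions) }}
    runs-on: ${{ matrix.os }}
    steps:
      - uses: actions/checkout@v4
      - uses: actions/setup-python@v5
        with:
          python-version: ${{ matrix.python-version }}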

src/guppy/computePsth.py

Lines changed: 20 additions & 0 deletions
@@ -58,6 +58,8 @@ def writeToFile(value: str):
 # function to read hdf5 file
 def read_hdf5(event, filepath, key):
     if event:
+        event = event.replace("\\","_")
+        event = event.replace("/","_")
         op = os.path.join(filepath, event+'.hdf5')
     else:
         op = filepath
@@ -72,6 +74,8 @@ def read_hdf5(event, filepath, key):
 
 # function to write hdf5 file
 def write_hdf5(data, event, filepath, key):
+    event = event.replace("\\","_")
+    event = event.replace("/","_")
     op = os.path.join(filepath, event+'.hdf5')
 
     # if file does not exist create a new file
@@ -120,6 +124,8 @@ def create_csv_area_peak(filepath, arr, name, index=[]):
 
 # function to create dataframe for each event PSTH and save it to h5 file
 def create_Df(filepath, event, name, psth, columns=[]):
+    event = event.replace("\\","_")
+    event = event.replace("/","_")
     if name:
         op = os.path.join(filepath, event+'_{}.h5'.format(name))
     else:
@@ -162,6 +168,8 @@ def create_Df(filepath, event, name, psth, columns=[]):
 
 # function to read h5 file and make a dataframe from it
 def read_Df(filepath, event, name):
+    event = event.replace("\\","_")
+    event = event.replace("/","_")
     if name:
         op = os.path.join(filepath, event+'_{}.h5'.format(name))
     else:
@@ -222,6 +230,9 @@ def helper_psth(z_score, event, filepath,
                 bin_psth_trials, use_time_or_trials,
                 baselineStart, baselineEnd,
                 naming, just_use_signal):
+
+    event = event.replace("\\","_")
+    event = event.replace("/","_")
 
     sampling_rate = read_hdf5('timeCorrection_'+naming, filepath, 'sampling_rate')[0]
 
@@ -351,6 +362,9 @@ def helper_psth(z_score, event, filepath,
 # function to create PSTH for each event using function helper_psth and save the PSTH to h5 file
 def storenamePsth(filepath, event, inputParameters):
 
+    event = event.replace("\\","_")
+    event = event.replace("/","_")
+
     selectForComputePsth = inputParameters['selectForComputePsth']
     bin_psth_trials = inputParameters['bin_psth_trials']
     use_time_or_trials = inputParameters['use_time_or_trials']
@@ -440,6 +454,9 @@ def helperPSTHPeakAndArea(psth_mean, timestamps, sampling_rate, peak_startPoint,
 
 # function to compute PSTH peak and area using the function helperPSTHPeakAndArea save the values to h5 and csv files.
 def findPSTHPeakAndArea(filepath, event, inputParameters):
+
+    event = event.replace("\\","_")
+    event = event.replace("/","_")
 
     #sampling_rate = read_hdf5(storesList[0,0], filepath, 'sampling_rate')
     peak_startPoint = inputParameters['peak_startPoint']
@@ -512,6 +529,9 @@ def psth_shape_check(psth):
 # function to compute average of group of recordings
 def averageForGroup(folderNames, event, inputParameters):
 
+    event = event.replace("\\","_")
+    event = event.replace("/","_")
+
     print("Averaging group of data...")
     insertLog("Averaging group of data", logging.DEBUG)
     path = []
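
Every addition in this file follows the same pattern: the event name is embedded in output file names via os.path.join, so a path separator inside it would push the file into an unintended (and possibly nonexistent) subdirectory. A small standalone sketch of the effect, using a made-up event name:

import os

filepath = "./output"              # hypothetical output directory
event = "RewardedTrials/Left"      # hypothetical event name containing a separator

# Without sanitization the separator produces a nested path.
print(os.path.join(filepath, event + ".hdf5"))    # ./output/RewardedTrials/Left.hdf5

# The pattern added throughout this commit flattens the name first.
event = event.replace("\\", "_")
event = event.replace("/", "_")
print(os.path.join(filepath, event + ".hdf5"))    # ./output/RewardedTrials_Left.hdf5

The same two replace calls are applied in preprocess.py and visualizePlot.py below.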

src/guppy/preprocess.py

Lines changed: 2 additions & 0 deletions
@@ -204,6 +204,8 @@ def read_hdf5(event, filepath, key):
 
 # function to write hdf5 file
 def write_hdf5(data, event, filepath, key):
+    event = event.replace("\\","_")
+    event = event.replace("/","_")
     op = os.path.join(filepath, event+'.hdf5')
 
     # if file does not exist create a new file

src/guppy/visualizePlot.py

Lines changed: 7 additions & 0 deletions
@@ -65,6 +65,8 @@ def insertLog(text, level):
 
 # read h5 file as a dataframe
 def read_Df(filepath, event, name):
+    event = event.replace("\\","_")
+    event = event.replace("/","_")
     if name:
         op = os.path.join(filepath, event+'_{}.h5'.format(name))
     else:
@@ -649,6 +651,11 @@ def heatmap(self):
 
 # function to combine all the output folders together and preprocess them to use them in helper_plots function
 def createPlots(filepath, event, inputParameters):
+
+    for i in range(len(event)):
+        event[i] = event[i].replace("\\","_")
+        event[i] = event[i].replace("/","_")
+
     average = inputParameters['visualizeAverageResults']
     visualize_zscore_or_dff = inputParameters['visualize_zscore_or_dff']
 

0 commit comments