
Commit 5bde952

fixing formatting again
1 parent d5b27c5 commit 5bde952

1 file changed

examples/eeg2025/tutorial_challenge_1.py

Lines changed: 23 additions & 4 deletions
@@ -12,10 +12,12 @@
 """
 
+######################################################################
 # .. image:: https://colab.research.google.com/assets/colab-badge.svg
 #    :target: https://colab.research.google.com/github/eeg2025/startkit/blob/main/challenge_1.ipynb
 #    :alt: Open In Colab
 
+######################################################################
 # Preliminary notes
 # -----------------
 # Before we begin, I just want to make a deal with you, ok?
@@ -28,6 +30,7 @@
 # The entire decoding community will only go further when we stop
 # solving the same problems over and over again and start working together.
 
+######################################################################
 # How can we use the knowledge from one EEG decoding task in another?
 # ---------------------------------------------------------------------
 # Transfer learning is a widespread technique used in deep learning. It
@@ -58,12 +61,14 @@
 # and fine-tuned on data from another condition, evaluating its capacity to
 # generalize with task-specific fine-tuning.
 
+######################################################################
 # __________
 #
 # Note: For simplicity, we will only show how to do the decoding
 # directly on our target task, and it is up to the teams to think about
 # how to use the passive task to perform the pre-training.
 #
+######################################################################
 # Install dependencies
 # --------------------
 # For the challenge, we will need two main dependencies:
@@ -75,6 +80,7 @@
 # have braindecode as a dependency.
 # You can just run ``pip install eegdash``.
 
+######################################################################
 # Imports and setup
 # -----------------
 from pathlib import Path
@@ -96,6 +102,7 @@
 import copy
 from joblib import Parallel, delayed
 
+######################################################################
 # Check GPU availability
 # ----------------------
 #
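The next hunk only shows the tail of the device-selection code. As a minimal sketch of what such a check typically looks like (the variable names and message wording here are assumptions, not the tutorial's exact code):

import torch

# Pick a GPU when one is visible to PyTorch, otherwise fall back to the CPU.
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
msg = (
    f"Using device: {device}. "
    "Training on CPU will work, but will be noticeably slower."
)
print(msg)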
@@ -116,12 +123,14 @@
 )
 print(msg)
 
+######################################################################
 # What are we decoding?
 # ---------------------
 #
 # Before we discuss what we want to analyse, it is important
 # to understand some basic concepts.
 #
+######################################################################
 # The brain decodes the problem
 # =============================
 #
@@ -146,6 +155,7 @@
 # is the temporal window length/epoch size over the interval of interest.
 # Here, :math:`\theta` denotes the parameters learned by the neural network.
 #
+# ------------------------------------------------------------------------------
 # Input/Output definition
 # ---------------------------
 # For the competition, the HBN-EEG (Healthy Brain Network EEG Datasets)
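To make the formulation above concrete: the model is a mapping f_theta from a windowed EEG signal of shape (channels, time samples) to a single scalar, the response time. The shapes below are placeholders for illustration only, not the competition's official values:

import torch

n_channels, n_times = 128, 200            # placeholder shapes: C channels, T samples per window
x = torch.randn(1, n_channels, n_times)   # one EEG window, shape (batch, C, T)

f_theta = torch.nn.Sequential(            # stand-in for any model with parameters theta
    torch.nn.Flatten(),
    torch.nn.Linear(n_channels * n_times, 1),
)
response_time = f_theta(x)                # one scalar regression output per window
print(response_time.shape)                # torch.Size([1, 1])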
@@ -159,6 +169,7 @@
 # You can use any model you want, as long as it follows the input/output
 # definitions above.
 
+######################################################################
 # Understand the task: Contrast Change Detection (CCD)
 # --------------------------------------------------------
 # If you are interested in more neuroscience insight, we recommend these two references: `HBN-EEG <https://www.biorxiv.org/content/10.1101/2024.10.03.615261v2.full.pdf>`__ and `Langer, N et al. (2017) <https://www.nature.com/articles/sdata201740#Sec2>`__.
@@ -183,15 +194,13 @@
 # * The **ramp onset**, the **button press**, and the **feedback** are **time-locked events** that yield ERP-like components.
 #
 # Your task (**label**) is to predict the response time of the subject during these windows.
-#
+######################################################################
 # In the figure below, we have the timeline representation of the cognitive task:
-#
 # .. image:: https://eeg2025.github.io/assets/img/image-2.jpg
-#
 
+######################################################################
 # Stimulus demonstration
 # ----------------------
-#
 # .. raw:: html
 #
 #    <div class="video-wrapper">
@@ -201,6 +210,7 @@
 #        allowfullscreen></iframe>
 #    </div>
 #
+######################################################################
 # PyTorch Dataset for the competition
 # -----------------------------------
 # Now, we have a PyTorch Dataset object that contains the set of recordings for the task
@@ -238,6 +248,7 @@
 # To download all the data directly, you can do:
 raws = Parallel(n_jobs=-1)(delayed(lambda d: d.raw)(d) for d in dataset_ccd.datasets)
 
+######################################################################
 # Alternatives for downloading the data
 # -------------------------------------
 #
@@ -254,6 +265,7 @@
 # aws s3 sync s3://nmdatasets/NeurIPS25/R1_L100_bdf data/R1_L100_bdf --no-sign-request
 
 
+######################################################################
 # Create windows of interest
 # -----------------------------
 # We epoch after the stimulus onset, shifting the start of the window by 500 ms.
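Only the tail of the windowing call appears in the next hunk. A rough sketch of how such windows are typically built with braindecode's create_windows_from_events; the variable name windows and the 100 Hz sampling rate are assumptions, so read the true rate from the recordings in practice:

from braindecode.preprocessing import create_windows_from_events

sfreq = 100  # assumed sampling rate; check dataset_ccd.datasets[0].raw.info["sfreq"]
windows = create_windows_from_events(
    dataset_ccd,
    trial_start_offset_samples=int(0.5 * sfreq),  # start 500 ms after the stimulus event
    trial_stop_offset_samples=0,                  # keep the annotated trial end
    preload=True,
)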
@@ -307,6 +319,7 @@
         "response_type",
     ),
 )
+######################################################################
 # Inspect the label distribution
 # -------------------------------
 import numpy as np
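Most of the inspection code falls outside this diff. A sketch of what looking at the target distribution might involve, assuming each windowed sample yields an (X, y, crop_inds) triple as braindecode windows datasets do:

import matplotlib.pyplot as plt
import numpy as np

rts = np.array([y for _, y, _ in windows])  # response times (s), one per window
print(f"n={rts.size}  median={np.median(rts):.3f}s  min={rts.min():.3f}s  max={rts.max():.3f}s")

plt.hist(rts, bins=50)
plt.xlabel("Response time (s)")
plt.ylabel("Number of windows")
plt.title("Label distribution")
plt.show()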
@@ -326,6 +339,7 @@
 plt.show()
 
 
+######################################################################
 # Split the data
 # ---------------
 # Extract meta information
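The split itself is only hinted at here. A rough subject-wise split sketch; the "subject" description column and the 80/10/10 ratios are assumptions, not necessarily what the tutorial uses:

import numpy as np
from braindecode.datasets import BaseConcatDataset

subject_splits = windows.split("subject")   # dict: subject id -> that subject's windows
subjects = np.array(sorted(subject_splits))
rng = np.random.default_rng(2025)
rng.shuffle(subjects)

n_train = int(0.8 * len(subjects))
n_valid = int(0.1 * len(subjects))

def subset(subject_ids):
    # Concatenate the windows of the selected subjects into one dataset.
    return BaseConcatDataset([d for s in subject_ids for d in subject_splits[s].datasets])

train_set = subset(subjects[:n_train])
valid_set = subset(subjects[n_train:n_train + n_valid])
test_set = subset(subjects[n_train + n_valid:])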
@@ -377,6 +391,7 @@
 print(f"Valid:\t{len(valid_set)}")
 print(f"Test:\t{len(test_set)}")
 
+######################################################################
 # Create dataloaders
 # -------------------
 batch_size = 128
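The following hunk shows only the test loader. For completeness, a sketch of the matching train and validation loaders (num_workers is an assumption; tune it to your machine):

from torch.utils.data import DataLoader

num_workers = 4  # assumption; set to 0 if multiprocessing causes issues
train_loader = DataLoader(train_set, batch_size=batch_size, shuffle=True, num_workers=num_workers)
valid_loader = DataLoader(valid_set, batch_size=batch_size, shuffle=False, num_workers=num_workers)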
@@ -393,6 +408,7 @@
     test_set, batch_size=batch_size, shuffle=False, num_workers=num_workers
 )
 
+######################################################################
 # Build the model
 # -----------------
 # For neural network models, **to start**, we suggest using the `braindecode models <https://braindecode.org/1.2/models/models_table.html>`__ zoo.
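Only model.to(device) is visible below. As one possible starting point from the braindecode model zoo (EEGNetv4 is an example choice here, not necessarily the model the tutorial uses; the input shape is read from one training window):

from braindecode.models import EEGNetv4

x, y, _ = train_set[0]            # one window to read the input shape from
model = EEGNetv4(
    n_chans=x.shape[0],           # number of EEG channels
    n_times=x.shape[1],           # samples per window
    n_outputs=1,                  # single regression output: the response time
)
model.to(device)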
@@ -412,6 +428,7 @@
 model.to(device)
 
 
+######################################################################
 # Define training and validation functions
 # -------------------------------------------
 # The rest is our classic PyTorch/PyTorch Lightning/skorch training pipeline,
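The train/valid functions themselves are outside this diff. A minimal sketch of one training epoch for the regression objective; the batch layout and argument names are assumptions, and the tutorial's own functions may differ in detail:

def train_one_epoch(model, loader, optimizer, loss_fn, device):
    """One pass over the training loader; returns the average loss."""
    model.train()
    total_loss, n_samples = 0.0, 0
    for X, y, _ in loader:                      # assumes (X, y, crop_inds) batches
        X = X.to(device).float()
        y = y.to(device).float().unsqueeze(1)   # shape (batch, 1) to match the model output
        optimizer.zero_grad()
        loss = loss_fn(model(X), y)
        loss.backward()
        optimizer.step()
        total_loss += loss.item() * len(X)
        n_samples += len(X)
    return total_loss / n_samples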
@@ -519,6 +536,7 @@ def valid_model(
     return avg_loss, rmse
 
 
+######################################################################
 # Train the model
 # ------------------
 lr = 1e-3
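The next hunk shows only the best-checkpoint bookkeeping. A sketch of the surrounding loop, reusing the train_one_epoch sketch above; the optimizer, loss, and epoch count are assumptions:

import copy
import torch

n_epochs = 20                                     # assumption; tune as needed
optimizer = torch.optim.AdamW(model.parameters(), lr=lr)
loss_fn = torch.nn.MSELoss()                      # assumption; any regression loss works

best_rmse, best_state = float("inf"), None
for epoch in range(n_epochs):
    train_loss = train_one_epoch(model, train_loader, optimizer, loss_fn, device)

    # Validation RMSE computed inline here; the tutorial wraps this in valid_model().
    model.eval()
    errors = []
    with torch.no_grad():
        for X, y, _ in valid_loader:
            pred = model(X.to(device).float()).squeeze(1).cpu()
            errors.append(pred - y.float())
    valid_rmse = torch.cat(errors).pow(2).mean().sqrt().item()

    print(f"epoch {epoch:02d}  train loss {train_loss:.4f}  valid RMSE {valid_rmse:.4f}")
    if valid_rmse < best_rmse:                    # keep the best validation checkpoint
        best_rmse = valid_rmse
        best_state = copy.deepcopy(model.state_dict())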
@@ -569,6 +587,7 @@ def valid_model(
 if best_state is not None:
     model.load_state_dict(best_state)
 
+######################################################################
 # Save the model
 # -----------------
 torch.save(model.state_dict(), "weights_challenge_1.pt")
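For later reuse, e.g. at submission time, the saved weights can be restored into a freshly built model of the same architecture:

model.load_state_dict(torch.load("weights_challenge_1.pt", map_location=device))
model.eval()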
