Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
17 commits
Select commit Hold shift + click to select a range
dac2b94
feat: TinyMyo pretraining and finetuning
MatteoFasulo Nov 25, 2025
a9f67ff
feat: add EMG regression head and update TinyMyo model for regression…
MatteoFasulo Dec 7, 2025
01d5084
feat: enhance dataset classes and update TinyMyo model documentation …
MatteoFasulo Dec 8, 2025
8e40940
fix: update safetensor file path handling in checkpoint conversion sc…
MatteoFasulo Dec 10, 2025
0657117
Update TinyMyo model configurations and enhance data handling for pre…
MatteoFasulo Dec 11, 2025
cfd6363
Refactor configuration and model files for EMG dataset and TinyMyo model
MatteoFasulo Dec 11, 2025
e8106e5
Delete .gitignore
Thoriri Dec 19, 2025
9bc3b89
fix: correct parameter settings and addressed suggested changes
MatteoFasulo Dec 19, 2025
b604d99
Update training script handling environment variables default values …
MatteoFasulo Dec 22, 2025
fe100cb
Add TinyMyo logo image to documentation
MatteoFasulo Jan 5, 2026
c35fbb1
Rename TinyMyo logo file
MatteoFasulo Jan 5, 2026
964d968
Changed TinyMyo logo file
MatteoFasulo Jan 5, 2026
d656af7
Add files via upload
MatteoFasulo Jan 5, 2026
2aed0ec
Added TinyMyo arxiv and HuggingFace badge to Readme
MatteoFasulo Jan 5, 2026
32e7006
Added TinyMyo arxiv and HuggingFace badge, why TinyMyo section with c…
MatteoFasulo Jan 5, 2026
15f674a
Update README and TinyMyo documentation with additional model details…
MatteoFasulo Jan 7, 2026
7ee5c65
Add link to scripts for downloading and preprocessing TinyMyo dataset…
MatteoFasulo Jan 16, 2026
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
160 changes: 134 additions & 26 deletions README.md

Large diffs are not rendered by default.

37 changes: 37 additions & 0 deletions config/data_module/emg_finetune_data_module.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,37 @@
# @package _global_
#*----------------------------------------------------------------------------*
#* Copyright (C) 2025 ETH Zurich, Switzerland *
#* SPDX-License-Identifier: Apache-2.0 *
#* *
#* Licensed under the Apache License, Version 2.0 (the "License"); *
#* you may not use this file except in compliance with the License. *
#* You may obtain a copy of the License at *
#* *
#* http://www.apache.org/licenses/LICENSE-2.0 *
#* *
#* Unless required by applicable law or agreed to in writing, software *
#* distributed under the License is distributed on an "AS IS" BASIS, *
#* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. *
#* See the License for the specific language governing permissions and *
#* limitations under the License. *
#* *
#* Author: Matteo Fasulo *
#*----------------------------------------------------------------------------*
# Hydra data-module config for fine-tuning on the UCI EMG gestures dataset.
# The three splits are pre-generated HDF5 files rooted at ${env:DATA_PATH};
# `_target_` paths are instantiated by Hydra at runtime.
data_module:
  _target_: data_module.finetune_data_module.FinetuneDataModule
  name: "emg"
  cfg:
    num_workers: ${num_workers}
    batch_size: ${batch_size}
    train:
      _target_: datasets.emg_finetune_dataset.EMGDataset
      hdf5_file: ${env:DATA_PATH}/UCI_EMG/EMG_data_for_gestures-master/h5/train.h5
      finetune: true
    val:
      _target_: datasets.emg_finetune_dataset.EMGDataset
      hdf5_file: ${env:DATA_PATH}/UCI_EMG/EMG_data_for_gestures-master/h5/val.h5
      finetune: true
    test:
      _target_: datasets.emg_finetune_dataset.EMGDataset
      hdf5_file: ${env:DATA_PATH}/UCI_EMG/EMG_data_for_gestures-master/h5/test.h5
      finetune: true
40 changes: 40 additions & 0 deletions config/data_module/emg_pretrain_data_module.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,40 @@
# @package _global_
#*----------------------------------------------------------------------------*
#* Copyright (C) 2025 ETH Zurich, Switzerland *
#* SPDX-License-Identifier: Apache-2.0 *
#* *
#* Licensed under the Apache License, Version 2.0 (the "License"); *
#* you may not use this file except in compliance with the License. *
#* You may obtain a copy of the License at *
#* *
#* http://www.apache.org/licenses/LICENSE-2.0 *
#* *
#* Unless required by applicable law or agreed to in writing, software *
#* distributed under the License is distributed on an "AS IS" BASIS, *
#* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. *
#* See the License for the specific language governing permissions and *
#* limitations under the License. *
#* *
#* Author: Matteo Fasulo *
#*----------------------------------------------------------------------------*
# Hydra data-module config for TinyMyo pre-training.
# Multiple EMG corpora are mixed; `test` is disabled (null) and the train/val
# split is taken from the pooled data via `train_val_split_ratio`.
data_module:
  _target_: data_module.pretrain_data_module.PretrainDataModule
  name: "emg"
  cfg:
    num_workers: ${num_workers}
    batch_size: ${batch_size}
    test: null
    train_val_split_ratio: 0.8
    datasets:
      # Placeholder entry kept explicitly null so the key is visible for overrides.
      demo_dataset: null
      emg2pose:
        _target_: datasets.emg_pretrain_dataset.EMGPretrainDataset
        data_dir: "${env:DATA_PATH}/emg2pose/h5/"
      db6:
        _target_: datasets.emg_pretrain_dataset.EMGPretrainDataset
        data_dir: "${env:DATA_PATH}/ninapro/DB6/h5/"
        # Ninapro recordings have fewer channels; zero-pad up to the model's 16.
        pad_up_to_max_chans: 16
      db7:
        _target_: datasets.emg_pretrain_dataset.EMGPretrainDataset
        data_dir: "${env:DATA_PATH}/ninapro/DB7/h5/"
        pad_up_to_max_chans: 16
100 changes: 100 additions & 0 deletions config/experiment/TinyMyo_finetune.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,100 @@
# @package _global_
#*----------------------------------------------------------------------------*
#* Copyright (C) 2025 ETH Zurich, Switzerland *
#* SPDX-License-Identifier: Apache-2.0 *
#* *
#* Licensed under the Apache License, Version 2.0 (the "License"); *
#* you may not use this file except in compliance with the License. *
#* You may obtain a copy of the License at *
#* *
#* http://www.apache.org/licenses/LICENSE-2.0 *
#* *
#* Unless required by applicable law or agreed to in writing, software *
#* distributed under the License is distributed on an "AS IS" BASIS, *
#* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. *
#* See the License for the specific language governing permissions and *
#* limitations under the License. *
#* *
#* Author: Matteo Fasulo *
#*----------------------------------------------------------------------------*
# Experiment config: fine-tune a pre-trained TinyMyo on EMG gesture classification.
tag: EMG_finetune

# Hardware / loader settings. gpus: -1 selects all visible devices.
gpus: -1
num_nodes: 1
num_workers: 8
batch_size: 32
max_epochs: 50

# Run-phase switches (canonical lowercase YAML booleans).
training: true
final_validate: true
final_test: true
finetune_pretrained: true
resume: false

layerwise_lr_decay: 0.90
scheduler_type: cosine

# Checkpoint to initialize from; left null so it must be supplied per run.
pretrained_checkpoint_path: null
pretrained_safetensors_path: null

finetuning:
  freeze_layers: false

io:
  base_output_path: ${env:DATA_PATH}
  checkpoint_dirpath: ${env:CHECKPOINT_DIR}/checkpoints
  version: 0

defaults:
  - override /data_module: emg_finetune_data_module
  - override /model: TinyMyo_finetune
  - override /scheduler: cosine
  - override /task: finetune_task_TinyMyo
  - override /criterion: finetune_criterion

masking:
  patch_size: [1, 20]
  masking_ratio: 0.50
  unmasked_loss_coeff: 0.1

input_normalization:
  normalize: false

model:
  num_classes: 6
  classification_type: "ml"

trainer:
  accelerator: gpu
  num_nodes: ${num_nodes}
  devices: ${gpus}
  strategy: auto
  max_epochs: ${max_epochs}

model_checkpoint:
  save_last: true
  monitor: "val_loss"
  mode: "min"
  save_top_k: 1

callbacks:
  early_stopping:
    _target_: pytorch_lightning.callbacks.EarlyStopping
    monitor: "val_loss"
    patience: 7
    mode: "min"
    verbose: true

optimizer:
  optim: 'AdamW'
  # Written with an explicit decimal point: dot-less forms such as `5e-4`
  # resolve to strings (not floats) under YAML 1.1 loaders.
  lr: 5.0e-4
  betas: [0.9, 0.98]
  weight_decay: 0.01

scheduler:
  trainer: ${trainer}
  min_lr: 1.0e-5
  warmup_lr_init: 1.0e-5
  warmup_epochs: 5
  total_training_opt_steps: ${max_epochs}
  t_in_epochs: true
79 changes: 79 additions & 0 deletions config/experiment/TinyMyo_pretrain.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,79 @@
# @package _global_
#*----------------------------------------------------------------------------*
#* Copyright (C) 2025 ETH Zurich, Switzerland *
#* SPDX-License-Identifier: Apache-2.0 *
#* *
#* Licensed under the Apache License, Version 2.0 (the "License"); *
#* you may not use this file except in compliance with the License. *
#* You may obtain a copy of the License at *
#* *
#* http://www.apache.org/licenses/LICENSE-2.0 *
#* *
#* Unless required by applicable law or agreed to in writing, software *
#* distributed under the License is distributed on an "AS IS" BASIS, *
#* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. *
#* See the License for the specific language governing permissions and *
#* limitations under the License. *
#* *
#* Author: Matteo Fasulo *
#*----------------------------------------------------------------------------*
# Experiment config: masked-reconstruction pre-training of TinyMyo on mixed
# EMG corpora (see emg_pretrain_data_module).
tag: EMG_pretrain

# Hardware / loader settings. gpus: -1 selects all visible devices.
gpus: -1
num_nodes: 1
num_workers: 8
batch_size: 128
max_epochs: 50

# Canonical lowercase YAML booleans; no held-out test set during pre-training.
final_validate: true
final_test: false

pretrained_checkpoint_path: null
io:
  base_output_path: ${env:DATA_PATH}
  checkpoint_dirpath: ${env:CHECKPOINT_DIR}/checkpoints
  version: 0

defaults:
  - override /data_module: emg_pretrain_data_module
  - override /model: TinyMyo_pretrain
  - override /scheduler: cosine
  - override /task: pretrain_task_TinyMyo
  - override /criterion: pretrain_criterion

masking:
  # Patches span 1 channel x 20 samples; half of them are masked.
  patch_size: [1, 20]
  masking_ratio: 0.50
  unmasked_loss_coeff: 0.1

input_normalization:
  normalize: true

scheduler:
  trainer: ${trainer}
  # Explicit decimal point: dot-less `1e-6` resolves to a string (not a
  # float) under YAML 1.1 loaders.
  min_lr: 1.0e-6
  warmup_lr_init: 1.0e-6
  warmup_epochs: 10
  total_training_opt_steps: ${max_epochs}
  t_in_epochs: true

trainer:
  accelerator: gpu
  num_nodes: ${num_nodes}
  devices: ${gpus}
  strategy: auto
  max_epochs: ${max_epochs}
  gradient_clip_val: 3
  # Effective batch = batch_size * accumulate_grad_batches (* world size).
  accumulate_grad_batches: 8

model_checkpoint:
  save_last: true
  monitor: "val_loss"
  mode: "min"
  save_top_k: 1

optimizer:
  optim: 'AdamW'
  lr: 1.0e-4
  betas: [0.9, 0.98]
  weight_decay: 0.01
33 changes: 33 additions & 0 deletions config/model/TinyMyo_finetune.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,33 @@
# @package _global_
#*----------------------------------------------------------------------------*
#* Copyright (C) 2025 ETH Zurich, Switzerland *
#* SPDX-License-Identifier: Apache-2.0 *
#* *
#* Licensed under the Apache License, Version 2.0 (the "License"); *
#* you may not use this file except in compliance with the License. *
#* You may obtain a copy of the License at *
#* *
#* http://www.apache.org/licenses/LICENSE-2.0 *
#* *
#* Unless required by applicable law or agreed to in writing, software *
#* distributed under the License is distributed on an "AS IS" BASIS, *
#* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. *
#* See the License for the specific language governing permissions and *
#* limitations under the License. *
#* *
#* Author: Matteo Fasulo *
#*----------------------------------------------------------------------------*
# TinyMyo backbone for fine-tuning: classification head size is injected via
# the experiment-level ${num_classes} interpolation.
model:
  _target_: models.TinyMyo.TinyMyo
  img_size: 1000
  patch_size: 20
  in_chans: 16
  embed_dim: 192
  n_layer: 8
  n_head: 3
  mlp_ratio: 4
  qkv_bias: true
  attn_drop: 0.1
  proj_drop: 0.1
  drop_path: 0.1
  num_classes: ${num_classes}
33 changes: 33 additions & 0 deletions config/model/TinyMyo_pretrain.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,33 @@
# @package _global_
#*----------------------------------------------------------------------------*
#* Copyright (C) 2025 ETH Zurich, Switzerland *
#* SPDX-License-Identifier: Apache-2.0 *
#* *
#* Licensed under the Apache License, Version 2.0 (the "License"); *
#* you may not use this file except in compliance with the License. *
#* You may obtain a copy of the License at *
#* *
#* http://www.apache.org/licenses/LICENSE-2.0 *
#* *
#* Unless required by applicable law or agreed to in writing, software *
#* distributed under the License is distributed on an "AS IS" BASIS, *
#* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. *
#* See the License for the specific language governing permissions and *
#* limitations under the License. *
#* *
#* Author: Matteo Fasulo *
#*----------------------------------------------------------------------------*
# TinyMyo backbone for pre-training: num_classes 0 means no classification
# head is attached. Architecture hyperparameters mirror TinyMyo_finetune.yaml
# so fine-tuning can reload pre-trained weights.
model:
  _target_: models.TinyMyo.TinyMyo
  img_size: 1000
  patch_size: 20
  in_chans: 16
  embed_dim: 192
  n_layer: 8
  n_head: 3
  mlp_ratio: 4
  qkv_bias: true
  attn_drop: 0.1
  proj_drop: 0.1
  drop_path: 0.1
  num_classes: 0
21 changes: 21 additions & 0 deletions config/task/finetune_task_TinyMyo.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,21 @@
# @package _global_
#*----------------------------------------------------------------------------*
#* Copyright (C) 2025 ETH Zurich, Switzerland *
#* SPDX-License-Identifier: Apache-2.0 *
#* *
#* Licensed under the Apache License, Version 2.0 (the "License"); *
#* you may not use this file except in compliance with the License. *
#* You may obtain a copy of the License at *
#* *
#* http://www.apache.org/licenses/LICENSE-2.0 *
#* *
#* Unless required by applicable law or agreed to in writing, software *
#* distributed under the License is distributed on an "AS IS" BASIS, *
#* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. *
#* See the License for the specific language governing permissions and *
#* limitations under the License. *
#* *
#* Author: Matteo Fasulo *
#*----------------------------------------------------------------------------*
# Lightning task used for TinyMyo fine-tuning (instantiated by Hydra).
task:
  _target_: tasks.finetune_task_EMG.FinetuneTask
21 changes: 21 additions & 0 deletions config/task/pretrain_task_TinyMyo.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,21 @@
# @package _global_
#*----------------------------------------------------------------------------*
#* Copyright (C) 2025 ETH Zurich, Switzerland *
#* SPDX-License-Identifier: Apache-2.0 *
#* *
#* Licensed under the Apache License, Version 2.0 (the "License"); *
#* you may not use this file except in compliance with the License. *
#* You may obtain a copy of the License at *
#* *
#* http://www.apache.org/licenses/LICENSE-2.0 *
#* *
#* Unless required by applicable law or agreed to in writing, software *
#* distributed under the License is distributed on an "AS IS" BASIS, *
#* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. *
#* See the License for the specific language governing permissions and *
#* limitations under the License. *
#* *
#* Author: Matteo Fasulo *
#*----------------------------------------------------------------------------*
# Lightning task used for TinyMyo masked pre-training (instantiated by Hydra).
task:
  _target_: tasks.pretrain_task_EMG.MaskTask
Loading