Skip to content

Commit 7a8fc7a

Browse files
authored
Merge pull request #244 from nf-core/dev-config
make it run for ibis
2 parents 1b91238 + 7fd5ffd commit 7a8fc7a

File tree

39 files changed

+1351
-244
lines changed

39 files changed

+1351
-244
lines changed

.github/workflows/ci.yml

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -35,6 +35,9 @@ jobs:
3535
- "singularity"
3636
test_name:
3737
- "test"
38+
- "test_ibis"
39+
- "test_ibis_with_preprocessing"
40+
- "test_noise_eval"
3841
isMaster:
3942
- ${{ github.base_ref == 'master' }}
4043
# Exclude conda and singularity on dev

conf/dev.config

Lines changed: 7 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,7 @@
1+
params {
2+
config_profile_name = 'Development profile'
3+
config_profile_description = 'Params needed during development'
4+
5+
// container
6+
container_dev = "docker.io/mathysgrapotte/stimulus-py:dev"
7+
}

conf/modules.config

Lines changed: 59 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -75,7 +75,8 @@ process {
7575
publishDir = [
7676
path: {"${params.outdir}/data"},
7777
mode: params.publish_dir_mode,
78-
pattern: "*.csv"
78+
pattern: "*.csv",
79+
saveAs: { filename -> filename.equals('versions.yml') ? null : filename }
7980
]
8081
ext.prefix = { "${meta.id}.stimulus" }
8182
ext.suffix = { "csv" }
@@ -89,43 +90,91 @@ process {
8990
// main config
9091
// ==============================================================================
9192

92-
withName: "STIMULUS_SPLIT_SPLIT" {
93+
withName: "STIMULUS_SPLIT_TRANSFORM" {
9394
publishDir = [
94-
path: { "${params.outdir}/configs" },
95-
mode: params.publish_dir_mode
95+
path: { "${params.outdir}/configs/${meta.id}" },
96+
mode: params.publish_dir_mode,
97+
saveAs: { filename -> filename.equals('versions.yml') ? null : filename }
9698
]
99+
tag = { "${meta.id}" }
100+
if (params.container_dev) {
101+
container = params.container_dev
102+
}
97103
}
98104

99105
withName: "STIMULUS_SPLIT_DATA" {
100106
publishDir = [
101107
enabled: false
102108
]
103-
ext.prefix = { "${meta.id}-split-${meta2.split_id}" }
109+
ext.prefix = { "${meta.id}-split-${meta.split_id}" }
110+
tag = { "${meta.id} - split: ${meta.split_id}" }
111+
if (params.container_dev) {
112+
container = params.container_dev
113+
}
104114
}
105115

106116
withName: "STIMULUS_TRANSFORM_CSV" {
107117
publishDir = [
108-
path: { "${params.outdir}/data" },
118+
path: { "${params.outdir}/transformed_data/${meta.id}" },
109119
mode: params.publish_dir_mode,
110-
when: params.save_data
120+
when: params.save_data,
121+
saveAs: { filename -> filename.equals('versions.yml') ? null : filename }
111122
]
112-
ext.prefix = { "${meta.id}-split-${meta2.split_id}-trans-${meta2.transform_id}" }
123+
ext.prefix = { "${meta.id}-split-${meta.split_id}-trans-${meta.transform_id}" }
124+
tag = { "${meta.id} - split: ${meta.split_id} - transform: ${meta.transform_id}" }
125+
if (params.container_dev) {
126+
container = params.container_dev
127+
}
113128
}
114129

115130
withName: "CHECK_MODEL" {
116131
ext.args = { [
117132
params.check_model_num_samples ? "-n ${params.check_model_num_samples}" : '',
118133
params.debug_mode ? "--debug_mode" : ''
119134
].flatten().unique(false).join(' ').trim()}
135+
tag = { "${meta.id} - split: ${meta.split_id} - transform: ${meta.transform_id}" }
136+
if (params.container_dev) {
137+
container = params.container_dev
138+
}
120139
}
121140

122141
withName: "STIMULUS_TUNE" {
123142
ext.args = { [
124143
params.debug_mode ? "--debug_mode" : ''
125144
].flatten().unique(false).join(' ').trim()}
126145
publishDir = [
127-
path: { "${params.outdir}/tune_results/${meta.id}" },
128-
mode: params.publish_dir_mode
146+
path: { "${params.outdir}/tune_results/${meta.id}/${meta.split_id}/${meta.transform_id}" },
147+
mode: params.publish_dir_mode,
148+
saveAs: { filename -> filename.equals('versions.yml') ? null : filename }
149+
]
150+
ext.prefix = { "${meta.id}-split-${meta.split_id}-trans-${meta.transform_id}-rep-${meta.replicate}" }
151+
tag = { "${meta.id} - split: ${meta.split_id} - transform: ${meta.transform_id} - replicate: ${meta.replicate}" }
152+
if (params.container_dev) {
153+
container = params.container_dev
154+
}
155+
}
156+
157+
withName: "STIMULUS_PREDICT" {
158+
ext.prefix = { "${meta.id}-split-${meta.split_id}-transf-${meta.transform_id}-rep-${meta.replicate}" }
159+
tag = { "${meta.id} - split: ${meta.split_id} - transform: ${meta.transform_id} - replicate: ${meta.replicate}" }
160+
if (params.container_dev) {
161+
container = params.container_dev
162+
}
163+
}
164+
165+
withName: "STIMULUS_COMPARE_TENSORS_COSINE" {
166+
ext.args = { "--mode cosine_similarity" }
167+
if (params.container_dev) {
168+
container = params.container_dev
169+
}
170+
}
171+
172+
withName: "CONCAT_COSINE" {
173+
ext.prefix = { "summary_cosine" }
174+
publishDir = [
175+
path: { "${params.outdir}/evaluation_results/" },
176+
mode: params.publish_dir_mode,
177+
saveAs: { filename -> filename.equals('versions.yml') ? null : filename }
129178
]
130179
}
131180
}

conf/test_ibis.config

Lines changed: 47 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,47 @@
1+
/*
2+
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
3+
Nextflow config file for running minimal tests
4+
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
5+
Defines input files and everything required to run a fast and simple pipeline test.
6+
7+
Use as follows:
8+
nextflow run main.nf -profile test_ibis,<docker/singularity> --outdir <OUTDIR>
9+
10+
----------------------------------------------------------------------------------------
11+
*/
12+
13+
params {
14+
config_profile_name = 'Test ibis profile'
15+
config_profile_description = 'Minimal ibis test dataset to check pipeline functions'
16+
17+
// Input data
18+
data = params.pipelines_testdata_base_path + 'deepmodeloptim/testdata/ibis/ibis_SP140/SP140_shade.stimulus.csv'
19+
data_config = params.pipelines_testdata_base_path + 'deepmodeloptim/testdata/ibis/data.yaml'
20+
model = params.pipelines_testdata_base_path + 'deepmodeloptim/testdata/ibis/ConvBasic_withEfficientKAN.py'
21+
model_config = params.pipelines_testdata_base_path + 'deepmodeloptim/testdata/ibis/ConvBasic_withEfficientKAN.yaml'
22+
23+
// output
24+
save_data = true
25+
}
26+
27+
// Limit resources so that this can run on GitHub Actions
28+
process {
29+
maxRetries = params.max_retries
30+
errorStrategy = params.err_start
31+
32+
withLabel:process_low {
33+
cpus = { 1 }
34+
memory = { 4.GB * task.attempt }
35+
time = { 10.m * task.attempt }
36+
}
37+
withLabel:process_medium {
38+
cpus = { 2 }
39+
memory = { 6.GB * task.attempt }
40+
time = { 30.m * task.attempt }
41+
}
42+
withLabel:process_high {
43+
cpus = { 4 }
44+
memory = { 8.GB * task.attempt }
45+
time = { 1.h * task.attempt }
46+
}
47+
}
Lines changed: 50 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,50 @@
1+
/*
2+
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
3+
Nextflow config file for running minimal tests
4+
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
5+
Defines input files and everything required to run a fast and simple pipeline test.
6+
7+
Use as follows:
8+
    nextflow run main.nf -profile test_ibis_with_preprocessing,<docker/singularity> --outdir <OUTDIR>
9+
10+
----------------------------------------------------------------------------------------
11+
*/
12+
13+
params {
14+
config_profile_name = 'Test ibis profile - with preprocessing'
15+
config_profile_description = 'Minimal ibis test dataset (with preprocessing) to check pipeline functions'
16+
17+
// Input data
18+
data = params.pipelines_testdata_base_path + 'deepmodeloptim/testdata/ibis/ibis_SP140/SP140_ghts.peaks'
19+
data_config = params.pipelines_testdata_base_path + 'deepmodeloptim/testdata/ibis/data.yaml'
20+
model = params.pipelines_testdata_base_path + 'deepmodeloptim/testdata/ibis/ConvBasic_withEfficientKAN.py'
21+
model_config = params.pipelines_testdata_base_path + 'deepmodeloptim/testdata/ibis/ConvBasic_withEfficientKAN.yaml'
22+
preprocessing_config = params.pipelines_testdata_base_path + 'deepmodeloptim/testdata/ibis/ibis_SP140/preprocessing.yaml'
23+
genome = 'GRCh38'
24+
bed_peak_size = 40
25+
26+
// output
27+
save_data = true
28+
}
29+
30+
// Limit resources so that this can run on GitHub Actions
31+
process {
32+
maxRetries = params.max_retries
33+
errorStrategy = params.err_start
34+
35+
withLabel:process_low {
36+
cpus = { 1 }
37+
memory = { 4.GB * task.attempt }
38+
time = { 10.m * task.attempt }
39+
}
40+
withLabel:process_medium {
41+
cpus = { 2 }
42+
memory = { 6.GB * task.attempt }
43+
time = { 30.m * task.attempt }
44+
}
45+
withLabel:process_high {
46+
cpus = { 4 }
47+
memory = { 8.GB * task.attempt }
48+
time = { 1.h * task.attempt }
49+
}
50+
}

conf/test_noise_eval.config

Lines changed: 54 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,54 @@
1+
/*
2+
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
3+
Nextflow config file for running minimal tests
4+
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
5+
Defines input files and everything required to run a fast and simple pipeline test.
6+
7+
Use as follows:
8+
    nextflow run main.nf -profile test_noise_eval,<docker/singularity> --outdir <OUTDIR>
9+
10+
----------------------------------------------------------------------------------------
11+
*/
12+
13+
params {
14+
    config_profile_name = 'Test noise eval profile'
15+
    config_profile_description = 'Minimal noise evaluation test dataset to check pipeline functions'
16+
17+
// Input data
18+
data = params.pipelines_testdata_base_path + 'deepmodeloptim/testdata/titanic/titanic_stimulus.csv'
19+
data_config = params.pipelines_testdata_base_path + 'deepmodeloptim/testdata/titanic/titanic.yaml'
20+
model = params.pipelines_testdata_base_path + 'deepmodeloptim/testdata/titanic/titanic_model.py'
21+
model_config = params.pipelines_testdata_base_path + 'deepmodeloptim/testdata/titanic/titanic_model.yaml'
22+
23+
// tune parameters
24+
tune_trials_range = "2,4,2"
25+
tune_replicates = 2
26+
27+
// predict data
28+
prediction_data = params.pipelines_testdata_base_path + 'deepmodeloptim/testdata/titanic/titanic_stimulus.csv'
29+
30+
// output
31+
save_data = false
32+
}
33+
34+
// Limit resources so that this can run on GitHub Actions
35+
process {
36+
maxRetries = params.max_retries
37+
errorStrategy = params.err_start
38+
39+
withLabel:process_low {
40+
cpus = { 1 }
41+
memory = { 4.GB * task.attempt }
42+
time = { 10.m * task.attempt }
43+
}
44+
withLabel:process_medium {
45+
cpus = { 2 }
46+
memory = { 6.GB * task.attempt }
47+
time = { 30.m * task.attempt }
48+
}
49+
withLabel:process_high {
50+
cpus = { 4 }
51+
memory = { 8.GB * task.attempt }
52+
time = { 1.h * task.attempt }
53+
}
54+
}

examples/pipeline_generated.json

Lines changed: 0 additions & 28 deletions
This file was deleted.

examples/test.json

Lines changed: 0 additions & 19 deletions
This file was deleted.

0 commit comments

Comments
 (0)