Commit b622f53

Restore existence checks
1 parent b5e676b commit b622f53

File tree

1 file changed: +30 -30 lines changed
  • subworkflows/local/prepare_genome


subworkflows/local/prepare_genome/main.nf

Lines changed: 30 additions & 30 deletions
@@ -74,18 +74,18 @@ workflow PREPARE_GENOME {
     ch_gtf = Channel.empty()
     if (gtf) {
         if (gtf.endsWith('.gz')) {
-            ch_gtf = GUNZIP_GTF ([ [:], file(gtf) ]).gunzip.map { it[1] }
+            ch_gtf = GUNZIP_GTF ([ [:], file(gtf, checkIfExists: true) ]).gunzip.map { it[1] }
             ch_versions = ch_versions.mix(GUNZIP_GTF.out.versions)
         } else {
-            ch_gtf = Channel.value(file(gtf))
+            ch_gtf = Channel.value(file(gtf, checkIfExists: true))
         }
     } else if (gff) {
         def ch_gff
         if (gff.endsWith('.gz')) {
-            ch_gff = GUNZIP_GFF ([ [:], file(gff) ]).gunzip
+            ch_gff = GUNZIP_GFF ([ [:], file(gff, checkIfExists: true) ]).gunzip
             ch_versions = ch_versions.mix(GUNZIP_GFF.out.versions)
         } else {
-            ch_gff = Channel.value(file(gff)).map { [ [:], it ] }
+            ch_gff = Channel.value(file(gff, checkIfExists: true)).map { [ [:], it ] }
         }
         ch_gtf = GFFREAD(ch_gff, []).gtf.map { it[1] }
         ch_versions = ch_versions.mix(GFFREAD.out.versions)
@@ -100,10 +100,10 @@ workflow PREPARE_GENOME {
     if (fasta_provided) {
         // Uncompress FASTA if needed
         if (fasta.endsWith('.gz')) {
-            ch_fasta = GUNZIP_FASTA ([ [:], file(fasta) ]).gunzip.map { it[1] }
+            ch_fasta = GUNZIP_FASTA ([ [:], file(fasta, checkIfExists: true) ]).gunzip.map { it[1] }
             ch_versions = ch_versions.mix(GUNZIP_FASTA.out.versions)
         } else {
-            ch_fasta = Channel.value(file(fasta))
+            ch_fasta = Channel.value(file(fasta, checkIfExists: true))
         }
     }

@@ -128,10 +128,10 @@ workflow PREPARE_GENOME {
     ch_add_fasta = Channel.empty()
     if (fasta_provided && additional_fasta) {
         if (additional_fasta.endsWith('.gz')) {
-            ch_add_fasta = GUNZIP_ADDITIONAL_FASTA([ [:], file(additional_fasta) ]).gunzip.map { it[1] }
+            ch_add_fasta = GUNZIP_ADDITIONAL_FASTA([ [:], file(additional_fasta, checkIfExists: true) ]).gunzip.map { it[1] }
             ch_versions = ch_versions.mix(GUNZIP_ADDITIONAL_FASTA.out.versions)
         } else {
-            ch_add_fasta = Channel.value(file(additional_fasta))
+            ch_add_fasta = Channel.value(file(additional_fasta, checkIfExists: true))
         }

         CUSTOM_CATADDITIONALFASTA(
@@ -150,10 +150,10 @@ workflow PREPARE_GENOME {
     ch_gene_bed = Channel.empty()
     if (gene_bed) {
         if (gene_bed.endsWith('.gz')) {
-            ch_gene_bed = GUNZIP_GENE_BED ([ [:], file(gene_bed) ]).gunzip.map { it[1] }
+            ch_gene_bed = GUNZIP_GENE_BED ([ [:], file(gene_bed, checkIfExists: true) ]).gunzip.map { it[1] }
             ch_versions = ch_versions.mix(GUNZIP_GENE_BED.out.versions)
         } else {
-            ch_gene_bed = Channel.value(file(gene_bed))
+            ch_gene_bed = Channel.value(file(gene_bed, checkIfExists: true))
         }
     } else {
         ch_gene_bed = GTF2BED(ch_gtf).bed
@@ -169,10 +169,10 @@ workflow PREPARE_GENOME {
     if (transcript_fasta) {
         // Use user-provided transcript FASTA
         if (transcript_fasta.endsWith('.gz')) {
-            ch_transcript_fasta = GUNZIP_TRANSCRIPT_FASTA ([ [:], file(transcript_fasta) ]).gunzip.map { it[1] }
+            ch_transcript_fasta = GUNZIP_TRANSCRIPT_FASTA ([ [:], file(transcript_fasta, checkIfExists: true) ]).gunzip.map { it[1] }
             ch_versions = ch_versions.mix(GUNZIP_TRANSCRIPT_FASTA.out.versions)
         } else {
-            ch_transcript_fasta = Channel.value(file(transcript_fasta))
+            ch_transcript_fasta = Channel.value(file(transcript_fasta, checkIfExists: true))
         }
         if (gencode) {
             PREPROCESS_TRANSCRIPTS_FASTA_GENCODE(ch_transcript_fasta)
@@ -214,18 +214,18 @@ workflow PREPARE_GENOME {
     if (bbsplit_index) {
         // Use user-provided bbsplit index
         if (bbsplit_index.endsWith('.tar.gz')) {
-            ch_bbsplit_index = UNTAR_BBSPLIT_INDEX ([ [:], file(bbsplit_index) ]).untar.map { it[1] }
+            ch_bbsplit_index = UNTAR_BBSPLIT_INDEX ([ [:], file(bbsplit_index, checkIfExists: true) ]).untar.map { it[1] }
             ch_versions = ch_versions.mix(UNTAR_BBSPLIT_INDEX.out.versions)
         } else {
-            ch_bbsplit_index = Channel.value(file(bbsplit_index))
+            ch_bbsplit_index = Channel.value(file(bbsplit_index, checkIfExists: true))
         }
     }
     else if (fasta_provided) {
         // Build it from scratch if we have FASTA
         Channel
-            .from(file(bbsplit_fasta_list))
+            .from(file(bbsplit_fasta_list, checkIfExists: true))
             .splitCsv()
-            .flatMap { id, fafile -> [ [ 'id', id ], [ 'fasta', file(fafile) ] ] }
+            .flatMap { id, fafile -> [ [ 'id', id ], [ 'fasta', file(fafile, checkIfExists: true) ] ] }
             .groupTuple()
             .map { it -> it[1] }
             .collect { [ it ] }
@@ -256,10 +256,10 @@ workflow PREPARE_GENOME {

     if (sortmerna_index) {
         if (sortmerna_index.endsWith('.tar.gz')) {
-            ch_sortmerna_index = UNTAR_SORTMERNA_INDEX ([ [:], file(sortmerna_index) ]).untar.map { it[1] }
+            ch_sortmerna_index = UNTAR_SORTMERNA_INDEX ([ [:], file(sortmerna_index, checkIfExists: true) ]).untar.map { it[1] }
             ch_versions = ch_versions.mix(UNTAR_SORTMERNA_INDEX.out.versions)
         } else {
-            ch_sortmerna_index = Channel.value([ [:], file(sortmerna_index) ])
+            ch_sortmerna_index = Channel.value([ [:], file(sortmerna_index, checkIfExists: true) ])
         }
     } else {
         // Build new SortMeRNA index from the rRNA references
@@ -280,17 +280,17 @@ workflow PREPARE_GENOME {
     if ('star_salmon' in prepare_tool_indices) {
         if (star_index) {
             if (star_index.endsWith('.tar.gz')) {
-                ch_star_index = UNTAR_STAR_INDEX ([ [:], file(star_index) ]).untar.map { it[1] }
+                ch_star_index = UNTAR_STAR_INDEX ([ [:], file(star_index, checkIfExists: true) ]).untar.map { it[1] }
                 ch_versions = ch_versions.mix(UNTAR_STAR_INDEX.out.versions)
             } else {
-                ch_star_index = Channel.value(file(star_index))
+                ch_star_index = Channel.value(file(star_index, checkIfExists: true))
             }
         }
         else if (fasta_provided) {
            // Build new STAR index
            // Possibly check AWS iGenome conditions
            def is_aws_igenome = false
-            if (file(fasta).getName() - '.gz' == 'genome.fa' && file(gtf).getName() - '.gz' == 'genes.gtf') {
+            if (file(fasta, checkIfExists: true).getName() - '.gz' == 'genome.fa' && file(gtf, checkIfExists: true).getName() - '.gz' == 'genes.gtf') {
                is_aws_igenome = true
            }
            if (is_aws_igenome) {
@@ -313,10 +313,10 @@ workflow PREPARE_GENOME {
     if ('star_rsem' in prepare_tool_indices) {
         if (rsem_index) {
             if (rsem_index.endsWith('.tar.gz')) {
-                ch_rsem_index = UNTAR_RSEM_INDEX ([ [:], file(rsem_index) ]).untar.map { it[1] }
+                ch_rsem_index = UNTAR_RSEM_INDEX ([ [:], file(rsem_index, checkIfExists: true) ]).untar.map { it[1] }
                 ch_versions = ch_versions.mix(UNTAR_RSEM_INDEX.out.versions)
             } else {
-                ch_rsem_index = Channel.value(file(rsem_index))
+                ch_rsem_index = Channel.value(file(rsem_index, checkIfExists: true))
             }
         }
         else if (fasta_provided) {
@@ -333,7 +333,7 @@ workflow PREPARE_GENOME {
     if ('hisat2' in prepare_tool_indices) {
         // splicesites
         if (splicesites) {
-            ch_splicesites = Channel.value(file(splicesites))
+            ch_splicesites = Channel.value(file(splicesites, checkIfExists: true))
         }
         else if (fasta_provided) {
            ch_splicesites = HISAT2_EXTRACTSPLICESITES(ch_gtf.map { [ [:], it ] }).txt.map { it[1] }
@@ -342,10 +342,10 @@ workflow PREPARE_GENOME {
         // the index
         if (hisat2_index) {
             if (hisat2_index.endsWith('.tar.gz')) {
-                ch_hisat2_index = UNTAR_HISAT2_INDEX ([ [:], file(hisat2_index) ]).untar.map { it[1] }
+                ch_hisat2_index = UNTAR_HISAT2_INDEX ([ [:], file(hisat2_index, checkIfExists: true) ]).untar.map { it[1] }
                 ch_versions = ch_versions.mix(UNTAR_HISAT2_INDEX.out.versions)
             } else {
-                ch_hisat2_index = Channel.value(file(hisat2_index))
+                ch_hisat2_index = Channel.value(file(hisat2_index, checkIfExists: true))
             }
         }
         else if (fasta_provided) {
@@ -366,10 +366,10 @@ workflow PREPARE_GENOME {
     if (salmon_index) {
         // use user-provided salmon index
         if (salmon_index.endsWith('.tar.gz')) {
-            ch_salmon_index = UNTAR_SALMON_INDEX ([ [:], file(salmon_index) ]).untar.map { it[1] }
+            ch_salmon_index = UNTAR_SALMON_INDEX ([ [:], file(salmon_index, checkIfExists: true) ]).untar.map { it[1] }
             ch_versions = ch_versions.mix(UNTAR_SALMON_INDEX.out.versions)
         } else {
-            ch_salmon_index = Channel.value(file(salmon_index))
+            ch_salmon_index = Channel.value(file(salmon_index, checkIfExists: true))
         }
     }
     else if (ch_transcript_fasta && fasta_provided) {
@@ -391,10 +391,10 @@ workflow PREPARE_GENOME {
     if ('kallisto' in prepare_tool_indices) {
         if (kallisto_index) {
             if (kallisto_index.endsWith('.tar.gz')) {
-                ch_kallisto_index = UNTAR_KALLISTO_INDEX ([ [:], file(kallisto_index) ]).untar
+                ch_kallisto_index = UNTAR_KALLISTO_INDEX ([ [:], file(kallisto_index, checkIfExists: true) ]).untar
                 ch_versions = ch_versions.mix(UNTAR_KALLISTO_INDEX.out.versions)
             } else {
-                ch_kallisto_index = Channel.value([ [:], file(kallisto_index) ])
+                ch_kallisto_index = Channel.value([ [:], file(kallisto_index, checkIfExists: true) ])
             }
         }
         else if (ch_transcript_fasta) {
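
Note on the restored option, outside the diff itself: in Nextflow, file(path, checkIfExists: true) raises an error as soon as the given path cannot be found, so a mistyped reference file path aborts the run at channel construction rather than when a downstream process tries to stage the input. Below is a minimal standalone sketch of that behaviour; it is not taken from the pipeline, and params.gtf with its default value is purely illustrative.

// Illustrative sketch only; params.gtf and its default are hypothetical.
params.gtf = 'genes.gtf.gz'

workflow {
    // With checkIfExists: true, file() throws immediately if the path is missing,
    // so the error points at the bad parameter instead of at a downstream process.
    ch_gtf = Channel.value(file(params.gtf, checkIfExists: true))
    ch_gtf.view { "Annotation staged from: ${it}" }
}

Run against a path that does not exist and Nextflow stops before any process is submitted, reporting the missing file.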
