diff --git a/modules.json b/modules.json
index ea85317b..91f4fd9d 100644
--- a/modules.json
+++ b/modules.json
@@ -34,21 +34,6 @@
"branch": "master",
"git_sha": "1fc29f92e439d5631fdf34b8ac4687297d70f5ec",
"installed_by": ["subworkflows"]
- },
- "utils_nextflow_pipeline": {
- "branch": "master",
- "git_sha": "c2b22d85f30a706a3073387f30380704fcae013b",
- "installed_by": ["subworkflows"]
- },
- "utils_nfcore_pipeline": {
- "branch": "master",
- "git_sha": "51ae5406a030d4da1e49e4dab49756844fdd6c7a",
- "installed_by": ["subworkflows"]
- },
- "utils_nfschema_plugin": {
- "branch": "master",
- "git_sha": "2fd2cd6d0e7b273747f32e465fdc6bcc3ae0814e",
- "installed_by": ["subworkflows"]
}
}
}
diff --git a/nextflow.config b/nextflow.config
index 8be15a06..586c9214 100644
--- a/nextflow.config
+++ b/nextflow.config
@@ -255,6 +255,7 @@ manifest {
// Nextflow plugins
plugins {
id 'nf-schema@2.3.0' // Validation of pipeline parameters and creation of an input channel from a sample sheet
+    id 'nf-utils@0.1.0' // Utility functions shared across nf-core pipelines (completion email/summary, config and profile checks)
}
validation {
diff --git a/subworkflows/local/utils_nfcore_fetchngs_pipeline/main.nf b/subworkflows/local/utils_nfcore_fetchngs_pipeline/main.nf
index 802f986f..5592ec83 100644
--- a/subworkflows/local/utils_nfcore_fetchngs_pipeline/main.nf
+++ b/subworkflows/local/utils_nfcore_fetchngs_pipeline/main.nf
@@ -8,14 +8,18 @@
========================================================================================
*/
-include { completionEmail } from '../../nf-core/utils_nfcore_pipeline'
-include { completionSummary } from '../../nf-core/utils_nfcore_pipeline'
-include { imNotification } from '../../nf-core/utils_nfcore_pipeline'
-include { paramsSummaryMap } from 'plugin/nf-schema'
-include { samplesheetToList } from 'plugin/nf-schema'
-include { UTILS_NEXTFLOW_PIPELINE } from '../../nf-core/utils_nextflow_pipeline'
-include { UTILS_NFCORE_PIPELINE } from '../../nf-core/utils_nfcore_pipeline'
-include { UTILS_NFSCHEMA_PLUGIN } from '../../nf-core/utils_nfschema_plugin'
+include { completionEmail } from 'plugin/nf-utils'
+include { completionSummary } from 'plugin/nf-utils'
+include { imNotification } from 'plugin/nf-utils'
+include { paramsSummaryMap } from 'plugin/nf-schema'
+include { samplesheetToList } from 'plugin/nf-schema'
+include { getWorkflowVersion } from 'plugin/nf-utils'
+include { dumpParametersToJSON } from 'plugin/nf-utils'
+include { checkCondaChannels } from 'plugin/nf-utils'
+include { checkConfigProvided } from 'plugin/nf-utils'
+include { checkProfileProvided } from 'plugin/nf-utils'
+include { paramsSummaryLog } from 'plugin/nf-schema'
+include { validateParameters } from 'plugin/nf-schema'
/*
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
@@ -24,7 +28,6 @@ include { UTILS_NFSCHEMA_PLUGIN } from '../../nf-core/utils_nfschema_plugin'
*/
workflow PIPELINE_INITIALISATION {
-
take:
version // boolean: Display version and exit
validate_params // boolean: Boolean whether to validate parameters against the schema at runtime
@@ -36,31 +39,41 @@ workflow PIPELINE_INITIALISATION {
main:
- //
- // Print version and exit if required and dump pipeline parameters to JSON file
- //
- UTILS_NEXTFLOW_PIPELINE (
- version,
- true,
- outdir,
- workflow.profile.tokenize(',').intersect(['conda', 'mamba']).size() >= 1
- )
+ ch_versions = Channel.empty()
+
+ // Plugin-based parameter dump and version info
+ if (outdir) {
+ dumpParametersToJSON(outdir, params)
+ }
+    // Print version and exit on --version (preserves the behaviour of the removed
+    // UTILS_NEXTFLOW_PIPELINE subworkflow; otherwise the `version` input is unused)
+    if (version) {
+        def version_str = getWorkflowVersion(workflow.manifest.version, workflow.commitId)
+        log.info("${workflow.manifest.name} ${version_str}")
+        System.exit(0)
+    }
+    if (workflow.profile.tokenize(',').intersect(['conda', 'mamba']).size() >= 1) {
+ if (!checkCondaChannels()) {
+ log.warn("Conda channels are not configured correctly!")
+ }
+ }
//
// Validate parameters and generate parameter summary to stdout
//
- UTILS_NFSCHEMA_PLUGIN (
- workflow,
- validate_params,
- null
- )
+ if (validate_params) {
+ // Print parameter summary to stdout. This will display the parameters
+ // that differ from the default given in the JSON schema
+ // TODO log.info(paramsSummaryLog(workflow, parameters_schema: parameters_schema))
+ log.info(paramsSummaryLog(workflow))
+
+ // Validate the parameters using nextflow_schema.json or the schema
+ // given via the validation.parametersSchema configuration option
+ // TODO if (parameters_schema) { validateParameters(parameters_schema: parameters_schema)
+ validateParameters()
+ }
+ else {
+ log.info(paramsSummaryLog(workflow))
+ }
- //
// Check config provided to the pipeline
- //
- UTILS_NFCORE_PIPELINE (
- nextflow_cli_args
- )
+ valid_config = checkConfigProvided()
+ checkProfileProvided(nextflow_cli_args)
//
// Create channel from input file provided through params.input
@@ -68,14 +81,14 @@ workflow PIPELINE_INITIALISATION {
ch_input = file(input)
if (isSraId(ch_input)) {
sraCheckENAMetadataFields(ena_metadata_fields)
- } else {
+ }
+ else {
error('Ids provided via --input not recognised please make sure they are either SRA / ENA / GEO / DDBJ ids!')
}
// Read in ids from --input file
- Channel
- .from(ch_input)
- .splitCsv(header:false, sep:'', strip:true)
+ Channel.from(ch_input)
+ .splitCsv(header: false, sep: '', strip: true)
.map { it[0] }
.unique()
.set { ch_ids }
@@ -91,7 +104,6 @@ workflow PIPELINE_INITIALISATION {
*/
workflow PIPELINE_COMPLETION {
-
take:
email // string: email address
email_on_fail // string: email address sent on pipeline failure
@@ -115,7 +127,7 @@ workflow PIPELINE_COMPLETION {
plaintext_email,
outdir,
monochrome_logs,
- []
+ [],
)
}
@@ -128,7 +140,7 @@ workflow PIPELINE_COMPLETION {
}
workflow.onError {
- log.error "Pipeline failed. Please refer to troubleshooting docs: https://nf-co.re/docs/usage/troubleshooting"
+ log.error("Pipeline failed. Please refer to troubleshooting docs: https://nf-co.re/docs/usage/troubleshooting")
}
}
@@ -157,7 +169,8 @@ def isSraId(input) {
if (num_match > 0) {
if (num_match == total_ids) {
is_sra = true
- } else {
+ }
+ else {
error("Mixture of ids provided via --input: ${no_match_ids.join(', ')}\nPlease provide either SRA / ENA / GEO / DDBJ ids!")
}
}
@@ -170,7 +183,7 @@ def isSraId(input) {
def sraCheckENAMetadataFields(ena_metadata_fields) {
// Check minimal ENA fields are provided to download FastQ files
def valid_ena_metadata_fields = ['run_accession', 'experiment_accession', 'library_layout', 'fastq_ftp', 'fastq_md5']
- def actual_ena_metadata_fields = ena_metadata_fields ? ena_metadata_fields.split(',').collect{ it.trim().toLowerCase() } : valid_ena_metadata_fields
+ def actual_ena_metadata_fields = ena_metadata_fields ? ena_metadata_fields.split(',').collect { it.trim().toLowerCase() } : valid_ena_metadata_fields
if (!actual_ena_metadata_fields.containsAll(valid_ena_metadata_fields)) {
error("Invalid option: '${ena_metadata_fields}'. Minimally required fields for '--ena_metadata_fields': '${valid_ena_metadata_fields.join(',')}'")
}
@@ -179,12 +192,16 @@ def sraCheckENAMetadataFields(ena_metadata_fields) {
// Print a warning after pipeline has completed
//
def sraCurateSamplesheetWarn() {
- log.warn "=============================================================================\n" +
- " Please double-check the samplesheet that has been auto-created by the pipeline.\n\n" +
- " Public databases don't reliably hold information such as strandedness\n" +
- " information, controls etc\n\n" +
- " All of the sample metadata obtained from the ENA has been appended\n" +
- " as additional columns to help you manually curate the samplesheet before\n" +
- " running nf-core/other pipelines.\n" +
- "==================================================================================="
+ log.warn(
+ """=============================================================================
+ Please double-check the samplesheet that has been auto-created by the pipeline.
+
+ Public databases don't reliably hold information such as strandedness
+ information, controls etc
+
+ All of the sample metadata obtained from the ENA has been appended
+ as additional columns to help you manually curate the samplesheet before
+ running nf-core/other pipelines.
+============================================================================="""
+ )
}
diff --git a/subworkflows/nf-core/utils_nextflow_pipeline/main.nf b/subworkflows/nf-core/utils_nextflow_pipeline/main.nf
deleted file mode 100644
index d6e593e8..00000000
--- a/subworkflows/nf-core/utils_nextflow_pipeline/main.nf
+++ /dev/null
@@ -1,126 +0,0 @@
-//
-// Subworkflow with functionality that may be useful for any Nextflow pipeline
-//
-
-/*
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
- SUBWORKFLOW DEFINITION
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-*/
-
-workflow UTILS_NEXTFLOW_PIPELINE {
- take:
- print_version // boolean: print version
- dump_parameters // boolean: dump parameters
- outdir // path: base directory used to publish pipeline results
- check_conda_channels // boolean: check conda channels
-
- main:
-
- //
- // Print workflow version and exit on --version
- //
- if (print_version) {
- log.info("${workflow.manifest.name} ${getWorkflowVersion()}")
- System.exit(0)
- }
-
- //
- // Dump pipeline parameters to a JSON file
- //
- if (dump_parameters && outdir) {
- dumpParametersToJSON(outdir)
- }
-
- //
- // When running with Conda, warn if channels have not been set-up appropriately
- //
- if (check_conda_channels) {
- checkCondaChannels()
- }
-
- emit:
- dummy_emit = true
-}
-
-/*
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
- FUNCTIONS
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-*/
-
-//
-// Generate version string
-//
-def getWorkflowVersion() {
- def version_string = "" as String
- if (workflow.manifest.version) {
- def prefix_v = workflow.manifest.version[0] != 'v' ? 'v' : ''
- version_string += "${prefix_v}${workflow.manifest.version}"
- }
-
- if (workflow.commitId) {
- def git_shortsha = workflow.commitId.substring(0, 7)
- version_string += "-g${git_shortsha}"
- }
-
- return version_string
-}
-
-//
-// Dump pipeline parameters to a JSON file
-//
-def dumpParametersToJSON(outdir) {
- def timestamp = new java.util.Date().format('yyyy-MM-dd_HH-mm-ss')
- def filename = "params_${timestamp}.json"
- def temp_pf = new File(workflow.launchDir.toString(), ".${filename}")
- def jsonStr = groovy.json.JsonOutput.toJson(params)
- temp_pf.text = groovy.json.JsonOutput.prettyPrint(jsonStr)
-
- nextflow.extension.FilesEx.copyTo(temp_pf.toPath(), "${outdir}/pipeline_info/params_${timestamp}.json")
- temp_pf.delete()
-}
-
-//
-// When running with -profile conda, warn if channels have not been set-up appropriately
-//
-def checkCondaChannels() {
- def parser = new org.yaml.snakeyaml.Yaml()
- def channels = []
- try {
- def config = parser.load("conda config --show channels".execute().text)
- channels = config.channels
- }
- catch (NullPointerException e) {
- log.debug(e)
- log.warn("Could not verify conda channel configuration.")
- return null
- }
- catch (IOException e) {
- log.debug(e)
- log.warn("Could not verify conda channel configuration.")
- return null
- }
-
- // Check that all channels are present
- // This channel list is ordered by required channel priority.
- def required_channels_in_order = ['conda-forge', 'bioconda']
- def channels_missing = ((required_channels_in_order as Set) - (channels as Set)) as Boolean
-
- // Check that they are in the right order
- def channel_priority_violation = required_channels_in_order != channels.findAll { ch -> ch in required_channels_in_order }
-
- if (channels_missing | channel_priority_violation) {
- log.warn """\
- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
- There is a problem with your Conda configuration!
- You will need to set-up the conda-forge and bioconda channels correctly.
- Please refer to https://bioconda.github.io/
- The observed channel order is
- ${channels}
- but the following channel order is required:
- ${required_channels_in_order}
- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~"
- """.stripIndent(true)
- }
-}
diff --git a/subworkflows/nf-core/utils_nextflow_pipeline/meta.yml b/subworkflows/nf-core/utils_nextflow_pipeline/meta.yml
deleted file mode 100644
index e5c3a0a8..00000000
--- a/subworkflows/nf-core/utils_nextflow_pipeline/meta.yml
+++ /dev/null
@@ -1,38 +0,0 @@
-# yaml-language-server: $schema=https://raw.githubusercontent.com/nf-core/modules/master/subworkflows/yaml-schema.json
-name: "UTILS_NEXTFLOW_PIPELINE"
-description: Subworkflow with functionality that may be useful for any Nextflow pipeline
-keywords:
- - utility
- - pipeline
- - initialise
- - version
-components: []
-input:
- - print_version:
- type: boolean
- description: |
- Print the version of the pipeline and exit
- - dump_parameters:
- type: boolean
- description: |
- Dump the parameters of the pipeline to a JSON file
- - output_directory:
- type: directory
- description: Path to output dir to write JSON file to.
- pattern: "results/"
- - check_conda_channel:
- type: boolean
- description: |
- Check if the conda channel priority is correct.
-output:
- - dummy_emit:
- type: boolean
- description: |
- Dummy emit to make nf-core subworkflows lint happy
-authors:
- - "@adamrtalbot"
- - "@drpatelh"
-maintainers:
- - "@adamrtalbot"
- - "@drpatelh"
- - "@maxulysse"
diff --git a/subworkflows/nf-core/utils_nextflow_pipeline/tests/main.function.nf.test b/subworkflows/nf-core/utils_nextflow_pipeline/tests/main.function.nf.test
deleted file mode 100644
index 68718e4f..00000000
--- a/subworkflows/nf-core/utils_nextflow_pipeline/tests/main.function.nf.test
+++ /dev/null
@@ -1,54 +0,0 @@
-
-nextflow_function {
-
- name "Test Functions"
- script "subworkflows/nf-core/utils_nextflow_pipeline/main.nf"
- config "subworkflows/nf-core/utils_nextflow_pipeline/tests/nextflow.config"
- tag 'subworkflows'
- tag 'utils_nextflow_pipeline'
- tag 'subworkflows/utils_nextflow_pipeline'
-
- test("Test Function getWorkflowVersion") {
-
- function "getWorkflowVersion"
-
- then {
- assertAll(
- { assert function.success },
- { assert snapshot(function.result).match() }
- )
- }
- }
-
- test("Test Function dumpParametersToJSON") {
-
- function "dumpParametersToJSON"
-
- when {
- function {
- """
- // define inputs of the function here. Example:
- input[0] = "$outputDir"
- """.stripIndent()
- }
- }
-
- then {
- assertAll(
- { assert function.success }
- )
- }
- }
-
- test("Test Function checkCondaChannels") {
-
- function "checkCondaChannels"
-
- then {
- assertAll(
- { assert function.success },
- { assert snapshot(function.result).match() }
- )
- }
- }
-}
diff --git a/subworkflows/nf-core/utils_nextflow_pipeline/tests/main.function.nf.test.snap b/subworkflows/nf-core/utils_nextflow_pipeline/tests/main.function.nf.test.snap
deleted file mode 100644
index e3f0baf4..00000000
--- a/subworkflows/nf-core/utils_nextflow_pipeline/tests/main.function.nf.test.snap
+++ /dev/null
@@ -1,20 +0,0 @@
-{
- "Test Function getWorkflowVersion": {
- "content": [
- "v9.9.9"
- ],
- "meta": {
- "nf-test": "0.8.4",
- "nextflow": "23.10.1"
- },
- "timestamp": "2024-02-28T12:02:05.308243"
- },
- "Test Function checkCondaChannels": {
- "content": null,
- "meta": {
- "nf-test": "0.8.4",
- "nextflow": "23.10.1"
- },
- "timestamp": "2024-02-28T12:02:12.425833"
- }
-}
\ No newline at end of file
diff --git a/subworkflows/nf-core/utils_nextflow_pipeline/tests/main.workflow.nf.test b/subworkflows/nf-core/utils_nextflow_pipeline/tests/main.workflow.nf.test
deleted file mode 100644
index 02dbf094..00000000
--- a/subworkflows/nf-core/utils_nextflow_pipeline/tests/main.workflow.nf.test
+++ /dev/null
@@ -1,113 +0,0 @@
-nextflow_workflow {
-
- name "Test Workflow UTILS_NEXTFLOW_PIPELINE"
- script "../main.nf"
- config "subworkflows/nf-core/utils_nextflow_pipeline/tests/nextflow.config"
- workflow "UTILS_NEXTFLOW_PIPELINE"
- tag 'subworkflows'
- tag 'utils_nextflow_pipeline'
- tag 'subworkflows/utils_nextflow_pipeline'
-
- test("Should run no inputs") {
-
- when {
- workflow {
- """
- print_version = false
- dump_parameters = false
- outdir = null
- check_conda_channels = false
-
- input[0] = print_version
- input[1] = dump_parameters
- input[2] = outdir
- input[3] = check_conda_channels
- """
- }
- }
-
- then {
- assertAll(
- { assert workflow.success }
- )
- }
- }
-
- test("Should print version") {
-
- when {
- workflow {
- """
- print_version = true
- dump_parameters = false
- outdir = null
- check_conda_channels = false
-
- input[0] = print_version
- input[1] = dump_parameters
- input[2] = outdir
- input[3] = check_conda_channels
- """
- }
- }
-
- then {
- expect {
- with(workflow) {
- assert success
- assert "nextflow_workflow v9.9.9" in stdout
- }
- }
- }
- }
-
- test("Should dump params") {
-
- when {
- workflow {
- """
- print_version = false
- dump_parameters = true
- outdir = 'results'
- check_conda_channels = false
-
- input[0] = false
- input[1] = true
- input[2] = outdir
- input[3] = false
- """
- }
- }
-
- then {
- assertAll(
- { assert workflow.success }
- )
- }
- }
-
- test("Should not create params JSON if no output directory") {
-
- when {
- workflow {
- """
- print_version = false
- dump_parameters = true
- outdir = null
- check_conda_channels = false
-
- input[0] = false
- input[1] = true
- input[2] = outdir
- input[3] = false
- """
- }
- }
-
- then {
- assertAll(
- { assert workflow.success }
- )
- }
- }
-}
diff --git a/subworkflows/nf-core/utils_nextflow_pipeline/tests/nextflow.config b/subworkflows/nf-core/utils_nextflow_pipeline/tests/nextflow.config
deleted file mode 100644
index a09572e5..00000000
--- a/subworkflows/nf-core/utils_nextflow_pipeline/tests/nextflow.config
+++ /dev/null
@@ -1,9 +0,0 @@
-manifest {
- name = 'nextflow_workflow'
- author = """nf-core"""
- homePage = 'https://127.0.0.1'
- description = """Dummy pipeline"""
- nextflowVersion = '!>=23.04.0'
- version = '9.9.9'
- doi = 'https://doi.org/10.5281/zenodo.5070524'
-}
diff --git a/subworkflows/nf-core/utils_nfcore_pipeline/main.nf b/subworkflows/nf-core/utils_nfcore_pipeline/main.nf
deleted file mode 100644
index bfd25876..00000000
--- a/subworkflows/nf-core/utils_nfcore_pipeline/main.nf
+++ /dev/null
@@ -1,419 +0,0 @@
-//
-// Subworkflow with utility functions specific to the nf-core pipeline template
-//
-
-/*
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
- SUBWORKFLOW DEFINITION
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-*/
-
-workflow UTILS_NFCORE_PIPELINE {
- take:
- nextflow_cli_args
-
- main:
- valid_config = checkConfigProvided()
- checkProfileProvided(nextflow_cli_args)
-
- emit:
- valid_config
-}
-
-/*
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
- FUNCTIONS
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-*/
-
-//
-// Warn if a -profile or Nextflow config has not been provided to run the pipeline
-//
-def checkConfigProvided() {
- def valid_config = true as Boolean
- if (workflow.profile == 'standard' && workflow.configFiles.size() <= 1) {
- log.warn(
- "[${workflow.manifest.name}] You are attempting to run the pipeline without any custom configuration!\n\n" + "This will be dependent on your local compute environment but can be achieved via one or more of the following:\n" + " (1) Using an existing pipeline profile e.g. `-profile docker` or `-profile singularity`\n" + " (2) Using an existing nf-core/configs for your Institution e.g. `-profile crick` or `-profile uppmax`\n" + " (3) Using your own local custom config e.g. `-c /path/to/your/custom.config`\n\n" + "Please refer to the quick start section and usage docs for the pipeline.\n "
- )
- valid_config = false
- }
- return valid_config
-}
-
-//
-// Exit pipeline if --profile contains spaces
-//
-def checkProfileProvided(nextflow_cli_args) {
- if (workflow.profile.endsWith(',')) {
- error(
- "The `-profile` option cannot end with a trailing comma, please remove it and re-run the pipeline!\n" + "HINT: A common mistake is to provide multiple values separated by spaces e.g. `-profile test, docker`.\n"
- )
- }
- if (nextflow_cli_args[0]) {
- log.warn(
- "nf-core pipelines do not accept positional arguments. The positional argument `${nextflow_cli_args[0]}` has been detected.\n" + "HINT: A common mistake is to provide multiple values separated by spaces e.g. `-profile test, docker`.\n"
- )
- }
-}
-
-//
-// Generate workflow version string
-//
-def getWorkflowVersion() {
- def version_string = "" as String
- if (workflow.manifest.version) {
- def prefix_v = workflow.manifest.version[0] != 'v' ? 'v' : ''
- version_string += "${prefix_v}${workflow.manifest.version}"
- }
-
- if (workflow.commitId) {
- def git_shortsha = workflow.commitId.substring(0, 7)
- version_string += "-g${git_shortsha}"
- }
-
- return version_string
-}
-
-//
-// Get software versions for pipeline
-//
-def processVersionsFromYAML(yaml_file) {
- def yaml = new org.yaml.snakeyaml.Yaml()
- def versions = yaml.load(yaml_file).collectEntries { k, v -> [k.tokenize(':')[-1], v] }
- return yaml.dumpAsMap(versions).trim()
-}
-
-//
-// Get workflow version for pipeline
-//
-def workflowVersionToYAML() {
- return """
- Workflow:
- ${workflow.manifest.name}: ${getWorkflowVersion()}
- Nextflow: ${workflow.nextflow.version}
- """.stripIndent().trim()
-}
-
-//
-// Get channel of software versions used in pipeline in YAML format
-//
-def softwareVersionsToYAML(ch_versions) {
- return ch_versions.unique().map { version -> processVersionsFromYAML(version) }.unique().mix(Channel.of(workflowVersionToYAML()))
-}
-
-//
-// Get workflow summary for MultiQC
-//
-def paramsSummaryMultiqc(summary_params) {
- def summary_section = ''
- summary_params
- .keySet()
- .each { group ->
- def group_params = summary_params.get(group)
- // This gets the parameters of that particular group
- if (group_params) {
- summary_section += "    <p style=\"font-size:110%\"><b>${group}</b></p>\n"
- summary_section += "    <dl class=\"dl-horizontal\">\n"
- group_params
- .keySet()
- .sort()
- .each { param ->
- summary_section += "        <dt>${param}</dt><dd><samp>${group_params.get(param) ?: '<span style=\"color:#999999;\">N/A</a>'}</samp></dd>\n"
- }
- summary_section += "    </dl>\n"
- }
- }
-
- def yaml_file_text = "id: '${workflow.manifest.name.replace('/', '-')}-summary'\n" as String
- yaml_file_text += "description: ' - this information is collected when the pipeline is started.'\n"
- yaml_file_text += "section_name: '${workflow.manifest.name} Workflow Summary'\n"
- yaml_file_text += "section_href: 'https://github.com/${workflow.manifest.name}'\n"
- yaml_file_text += "plot_type: 'html'\n"
- yaml_file_text += "data: |\n"
- yaml_file_text += "${summary_section}"
-
- return yaml_file_text
-}
-
-//
-// ANSII colours used for terminal logging
-//
-def logColours(monochrome_logs=true) {
- def colorcodes = [:] as Map
-
- // Reset / Meta
- colorcodes['reset'] = monochrome_logs ? '' : "\033[0m"
- colorcodes['bold'] = monochrome_logs ? '' : "\033[1m"
- colorcodes['dim'] = monochrome_logs ? '' : "\033[2m"
- colorcodes['underlined'] = monochrome_logs ? '' : "\033[4m"
- colorcodes['blink'] = monochrome_logs ? '' : "\033[5m"
- colorcodes['reverse'] = monochrome_logs ? '' : "\033[7m"
- colorcodes['hidden'] = monochrome_logs ? '' : "\033[8m"
-
- // Regular Colors
- colorcodes['black'] = monochrome_logs ? '' : "\033[0;30m"
- colorcodes['red'] = monochrome_logs ? '' : "\033[0;31m"
- colorcodes['green'] = monochrome_logs ? '' : "\033[0;32m"
- colorcodes['yellow'] = monochrome_logs ? '' : "\033[0;33m"
- colorcodes['blue'] = monochrome_logs ? '' : "\033[0;34m"
- colorcodes['purple'] = monochrome_logs ? '' : "\033[0;35m"
- colorcodes['cyan'] = monochrome_logs ? '' : "\033[0;36m"
- colorcodes['white'] = monochrome_logs ? '' : "\033[0;37m"
-
- // Bold
- colorcodes['bblack'] = monochrome_logs ? '' : "\033[1;30m"
- colorcodes['bred'] = monochrome_logs ? '' : "\033[1;31m"
- colorcodes['bgreen'] = monochrome_logs ? '' : "\033[1;32m"
- colorcodes['byellow'] = monochrome_logs ? '' : "\033[1;33m"
- colorcodes['bblue'] = monochrome_logs ? '' : "\033[1;34m"
- colorcodes['bpurple'] = monochrome_logs ? '' : "\033[1;35m"
- colorcodes['bcyan'] = monochrome_logs ? '' : "\033[1;36m"
- colorcodes['bwhite'] = monochrome_logs ? '' : "\033[1;37m"
-
- // Underline
- colorcodes['ublack'] = monochrome_logs ? '' : "\033[4;30m"
- colorcodes['ured'] = monochrome_logs ? '' : "\033[4;31m"
- colorcodes['ugreen'] = monochrome_logs ? '' : "\033[4;32m"
- colorcodes['uyellow'] = monochrome_logs ? '' : "\033[4;33m"
- colorcodes['ublue'] = monochrome_logs ? '' : "\033[4;34m"
- colorcodes['upurple'] = monochrome_logs ? '' : "\033[4;35m"
- colorcodes['ucyan'] = monochrome_logs ? '' : "\033[4;36m"
- colorcodes['uwhite'] = monochrome_logs ? '' : "\033[4;37m"
-
- // High Intensity
- colorcodes['iblack'] = monochrome_logs ? '' : "\033[0;90m"
- colorcodes['ired'] = monochrome_logs ? '' : "\033[0;91m"
- colorcodes['igreen'] = monochrome_logs ? '' : "\033[0;92m"
- colorcodes['iyellow'] = monochrome_logs ? '' : "\033[0;93m"
- colorcodes['iblue'] = monochrome_logs ? '' : "\033[0;94m"
- colorcodes['ipurple'] = monochrome_logs ? '' : "\033[0;95m"
- colorcodes['icyan'] = monochrome_logs ? '' : "\033[0;96m"
- colorcodes['iwhite'] = monochrome_logs ? '' : "\033[0;97m"
-
- // Bold High Intensity
- colorcodes['biblack'] = monochrome_logs ? '' : "\033[1;90m"
- colorcodes['bired'] = monochrome_logs ? '' : "\033[1;91m"
- colorcodes['bigreen'] = monochrome_logs ? '' : "\033[1;92m"
- colorcodes['biyellow'] = monochrome_logs ? '' : "\033[1;93m"
- colorcodes['biblue'] = monochrome_logs ? '' : "\033[1;94m"
- colorcodes['bipurple'] = monochrome_logs ? '' : "\033[1;95m"
- colorcodes['bicyan'] = monochrome_logs ? '' : "\033[1;96m"
- colorcodes['biwhite'] = monochrome_logs ? '' : "\033[1;97m"
-
- return colorcodes
-}
-
-// Return a single report from an object that may be a Path or List
-//
-def getSingleReport(multiqc_reports) {
- if (multiqc_reports instanceof Path) {
- return multiqc_reports
- } else if (multiqc_reports instanceof List) {
- if (multiqc_reports.size() == 0) {
- log.warn("[${workflow.manifest.name}] No reports found from process 'MULTIQC'")
- return null
- } else if (multiqc_reports.size() == 1) {
- return multiqc_reports.first()
- } else {
- log.warn("[${workflow.manifest.name}] Found multiple reports from process 'MULTIQC', will use only one")
- return multiqc_reports.first()
- }
- } else {
- return null
- }
-}
-
-//
-// Construct and send completion email
-//
-def completionEmail(summary_params, email, email_on_fail, plaintext_email, outdir, monochrome_logs=true, multiqc_report=null) {
-
- // Set up the e-mail variables
- def subject = "[${workflow.manifest.name}] Successful: ${workflow.runName}"
- if (!workflow.success) {
- subject = "[${workflow.manifest.name}] FAILED: ${workflow.runName}"
- }
-
- def summary = [:]
- summary_params
- .keySet()
- .sort()
- .each { group ->
- summary << summary_params[group]
- }
-
- def misc_fields = [:]
- misc_fields['Date Started'] = workflow.start
- misc_fields['Date Completed'] = workflow.complete
- misc_fields['Pipeline script file path'] = workflow.scriptFile
- misc_fields['Pipeline script hash ID'] = workflow.scriptId
- if (workflow.repository) {
- misc_fields['Pipeline repository Git URL'] = workflow.repository
- }
- if (workflow.commitId) {
- misc_fields['Pipeline repository Git Commit'] = workflow.commitId
- }
- if (workflow.revision) {
- misc_fields['Pipeline Git branch/tag'] = workflow.revision
- }
- misc_fields['Nextflow Version'] = workflow.nextflow.version
- misc_fields['Nextflow Build'] = workflow.nextflow.build
- misc_fields['Nextflow Compile Timestamp'] = workflow.nextflow.timestamp
-
- def email_fields = [:]
- email_fields['version'] = getWorkflowVersion()
- email_fields['runName'] = workflow.runName
- email_fields['success'] = workflow.success
- email_fields['dateComplete'] = workflow.complete
- email_fields['duration'] = workflow.duration
- email_fields['exitStatus'] = workflow.exitStatus
- email_fields['errorMessage'] = (workflow.errorMessage ?: 'None')
- email_fields['errorReport'] = (workflow.errorReport ?: 'None')
- email_fields['commandLine'] = workflow.commandLine
- email_fields['projectDir'] = workflow.projectDir
- email_fields['summary'] = summary << misc_fields
-
- // On success try attach the multiqc report
- def mqc_report = getSingleReport(multiqc_report)
-
- // Check if we are only sending emails on failure
- def email_address = email
- if (!email && email_on_fail && !workflow.success) {
- email_address = email_on_fail
- }
-
- // Render the TXT template
- def engine = new groovy.text.GStringTemplateEngine()
- def tf = new File("${workflow.projectDir}/assets/email_template.txt")
- def txt_template = engine.createTemplate(tf).make(email_fields)
- def email_txt = txt_template.toString()
-
- // Render the HTML template
- def hf = new File("${workflow.projectDir}/assets/email_template.html")
- def html_template = engine.createTemplate(hf).make(email_fields)
- def email_html = html_template.toString()
-
- // Render the sendmail template
- def max_multiqc_email_size = (params.containsKey('max_multiqc_email_size') ? params.max_multiqc_email_size : 0) as MemoryUnit
- def smail_fields = [email: email_address, subject: subject, email_txt: email_txt, email_html: email_html, projectDir: "${workflow.projectDir}", mqcFile: mqc_report, mqcMaxSize: max_multiqc_email_size.toBytes()]
- def sf = new File("${workflow.projectDir}/assets/sendmail_template.txt")
- def sendmail_template = engine.createTemplate(sf).make(smail_fields)
- def sendmail_html = sendmail_template.toString()
-
- // Send the HTML e-mail
- def colors = logColours(monochrome_logs) as Map
- if (email_address) {
- try {
- if (plaintext_email) {
- new org.codehaus.groovy.GroovyException('Send plaintext e-mail, not HTML')
- }
- // Try to send HTML e-mail using sendmail
- def sendmail_tf = new File(workflow.launchDir.toString(), ".sendmail_tmp.html")
- sendmail_tf.withWriter { w -> w << sendmail_html }
- ['sendmail', '-t'].execute() << sendmail_html
- log.info("-${colors.purple}[${workflow.manifest.name}]${colors.green} Sent summary e-mail to ${email_address} (sendmail)-")
- }
- catch (Exception msg) {
- log.debug(msg.toString())
- log.debug("Trying with mail instead of sendmail")
- // Catch failures and try with plaintext
- def mail_cmd = ['mail', '-s', subject, '--content-type=text/html', email_address]
- mail_cmd.execute() << email_html
- log.info("-${colors.purple}[${workflow.manifest.name}]${colors.green} Sent summary e-mail to ${email_address} (mail)-")
- }
- }
-
- // Write summary e-mail HTML to a file
- def output_hf = new File(workflow.launchDir.toString(), ".pipeline_report.html")
- output_hf.withWriter { w -> w << email_html }
- nextflow.extension.FilesEx.copyTo(output_hf.toPath(), "${outdir}/pipeline_info/pipeline_report.html")
- output_hf.delete()
-
- // Write summary e-mail TXT to a file
- def output_tf = new File(workflow.launchDir.toString(), ".pipeline_report.txt")
- output_tf.withWriter { w -> w << email_txt }
- nextflow.extension.FilesEx.copyTo(output_tf.toPath(), "${outdir}/pipeline_info/pipeline_report.txt")
- output_tf.delete()
-}
-
-//
-// Print pipeline summary on completion
-//
-def completionSummary(monochrome_logs=true) {
- def colors = logColours(monochrome_logs) as Map
- if (workflow.success) {
- if (workflow.stats.ignoredCount == 0) {
- log.info("-${colors.purple}[${workflow.manifest.name}]${colors.green} Pipeline completed successfully${colors.reset}-")
- }
- else {
- log.info("-${colors.purple}[${workflow.manifest.name}]${colors.yellow} Pipeline completed successfully, but with errored process(es) ${colors.reset}-")
- }
- }
- else {
- log.info("-${colors.purple}[${workflow.manifest.name}]${colors.red} Pipeline completed with errors${colors.reset}-")
- }
-}
-
-//
-// Construct and send a notification to a web server as JSON e.g. Microsoft Teams and Slack
-//
-def imNotification(summary_params, hook_url) {
- def summary = [:]
- summary_params
- .keySet()
- .sort()
- .each { group ->
- summary << summary_params[group]
- }
-
- def misc_fields = [:]
- misc_fields['start'] = workflow.start
- misc_fields['complete'] = workflow.complete
- misc_fields['scriptfile'] = workflow.scriptFile
- misc_fields['scriptid'] = workflow.scriptId
- if (workflow.repository) {
- misc_fields['repository'] = workflow.repository
- }
- if (workflow.commitId) {
- misc_fields['commitid'] = workflow.commitId
- }
- if (workflow.revision) {
- misc_fields['revision'] = workflow.revision
- }
- misc_fields['nxf_version'] = workflow.nextflow.version
- misc_fields['nxf_build'] = workflow.nextflow.build
- misc_fields['nxf_timestamp'] = workflow.nextflow.timestamp
-
- def msg_fields = [:]
- msg_fields['version'] = getWorkflowVersion()
- msg_fields['runName'] = workflow.runName
- msg_fields['success'] = workflow.success
- msg_fields['dateComplete'] = workflow.complete
- msg_fields['duration'] = workflow.duration
- msg_fields['exitStatus'] = workflow.exitStatus
- msg_fields['errorMessage'] = (workflow.errorMessage ?: 'None')
- msg_fields['errorReport'] = (workflow.errorReport ?: 'None')
- msg_fields['commandLine'] = workflow.commandLine.replaceFirst(/ +--hook_url +[^ ]+/, "")
- msg_fields['projectDir'] = workflow.projectDir
- msg_fields['summary'] = summary << misc_fields
-
- // Render the JSON template
- def engine = new groovy.text.GStringTemplateEngine()
- // Different JSON depending on the service provider
- // Defaults to "Adaptive Cards" (https://adaptivecards.io), except Slack which has its own format
- def json_path = hook_url.contains("hooks.slack.com") ? "slackreport.json" : "adaptivecard.json"
- def hf = new File("${workflow.projectDir}/assets/${json_path}")
- def json_template = engine.createTemplate(hf).make(msg_fields)
- def json_message = json_template.toString()
-
- // POST
- def post = new URL(hook_url).openConnection()
- post.setRequestMethod("POST")
- post.setDoOutput(true)
- post.setRequestProperty("Content-Type", "application/json")
- post.getOutputStream().write(json_message.getBytes("UTF-8"))
- def postRC = post.getResponseCode()
- if (!postRC.equals(200)) {
- log.warn(post.getErrorStream().getText())
- }
-}
diff --git a/subworkflows/nf-core/utils_nfcore_pipeline/meta.yml b/subworkflows/nf-core/utils_nfcore_pipeline/meta.yml
deleted file mode 100644
index d08d2434..00000000
--- a/subworkflows/nf-core/utils_nfcore_pipeline/meta.yml
+++ /dev/null
@@ -1,24 +0,0 @@
-# yaml-language-server: $schema=https://raw.githubusercontent.com/nf-core/modules/master/subworkflows/yaml-schema.json
-name: "UTILS_NFCORE_PIPELINE"
-description: Subworkflow with utility functions specific to the nf-core pipeline template
-keywords:
- - utility
- - pipeline
- - initialise
- - version
-components: []
-input:
- - nextflow_cli_args:
- type: list
- description: |
- Nextflow CLI positional arguments
-output:
- - success:
- type: boolean
- description: |
- Dummy output to indicate success
-authors:
- - "@adamrtalbot"
-maintainers:
- - "@adamrtalbot"
- - "@maxulysse"
diff --git a/subworkflows/nf-core/utils_nfcore_pipeline/tests/main.function.nf.test b/subworkflows/nf-core/utils_nfcore_pipeline/tests/main.function.nf.test
deleted file mode 100644
index f117040c..00000000
--- a/subworkflows/nf-core/utils_nfcore_pipeline/tests/main.function.nf.test
+++ /dev/null
@@ -1,126 +0,0 @@
-
-nextflow_function {
-
- name "Test Functions"
- script "../main.nf"
- config "subworkflows/nf-core/utils_nfcore_pipeline/tests/nextflow.config"
- tag "subworkflows"
- tag "subworkflows_nfcore"
- tag "utils_nfcore_pipeline"
- tag "subworkflows/utils_nfcore_pipeline"
-
- test("Test Function checkConfigProvided") {
-
- function "checkConfigProvided"
-
- then {
- assertAll(
- { assert function.success },
- { assert snapshot(function.result).match() }
- )
- }
- }
-
- test("Test Function checkProfileProvided") {
-
- function "checkProfileProvided"
-
- when {
- function {
- """
- input[0] = []
- """
- }
- }
-
- then {
- assertAll(
- { assert function.success },
- { assert snapshot(function.result).match() }
- )
- }
- }
-
- test("Test Function without logColours") {
-
- function "logColours"
-
- when {
- function {
- """
- input[0] = true
- """
- }
- }
-
- then {
- assertAll(
- { assert function.success },
- { assert snapshot(function.result).match() }
- )
- }
- }
-
- test("Test Function with logColours") {
- function "logColours"
-
- when {
- function {
- """
- input[0] = false
- """
- }
- }
-
- then {
- assertAll(
- { assert function.success },
- { assert snapshot(function.result).match() }
- )
- }
- }
-
- test("Test Function getSingleReport with a single file") {
- function "getSingleReport"
-
- when {
- function {
- """
- input[0] = file(params.modules_testdata_base_path + '/generic/tsv/test.tsv', checkIfExists: true)
- """
- }
- }
-
- then {
- assertAll(
- { assert function.success },
- { assert function.result.contains("test.tsv") }
- )
- }
- }
-
- test("Test Function getSingleReport with multiple files") {
- function "getSingleReport"
-
- when {
- function {
- """
- input[0] = [
- file(params.modules_testdata_base_path + '/generic/tsv/test.tsv', checkIfExists: true),
- file(params.modules_testdata_base_path + '/generic/tsv/network.tsv', checkIfExists: true),
- file(params.modules_testdata_base_path + '/generic/tsv/expression.tsv', checkIfExists: true)
- ]
- """
- }
- }
-
- then {
- assertAll(
- { assert function.success },
- { assert function.result.contains("test.tsv") },
- { assert !function.result.contains("network.tsv") },
- { assert !function.result.contains("expression.tsv") }
- )
- }
- }
-}
diff --git a/subworkflows/nf-core/utils_nfcore_pipeline/tests/main.function.nf.test.snap b/subworkflows/nf-core/utils_nfcore_pipeline/tests/main.function.nf.test.snap
deleted file mode 100644
index 02c67014..00000000
--- a/subworkflows/nf-core/utils_nfcore_pipeline/tests/main.function.nf.test.snap
+++ /dev/null
@@ -1,136 +0,0 @@
-{
- "Test Function checkProfileProvided": {
- "content": null,
- "meta": {
- "nf-test": "0.8.4",
- "nextflow": "23.10.1"
- },
- "timestamp": "2024-02-28T12:03:03.360873"
- },
- "Test Function checkConfigProvided": {
- "content": [
- true
- ],
- "meta": {
- "nf-test": "0.8.4",
- "nextflow": "23.10.1"
- },
- "timestamp": "2024-02-28T12:02:59.729647"
- },
- "Test Function without logColours": {
- "content": [
- {
- "reset": "",
- "bold": "",
- "dim": "",
- "underlined": "",
- "blink": "",
- "reverse": "",
- "hidden": "",
- "black": "",
- "red": "",
- "green": "",
- "yellow": "",
- "blue": "",
- "purple": "",
- "cyan": "",
- "white": "",
- "bblack": "",
- "bred": "",
- "bgreen": "",
- "byellow": "",
- "bblue": "",
- "bpurple": "",
- "bcyan": "",
- "bwhite": "",
- "ublack": "",
- "ured": "",
- "ugreen": "",
- "uyellow": "",
- "ublue": "",
- "upurple": "",
- "ucyan": "",
- "uwhite": "",
- "iblack": "",
- "ired": "",
- "igreen": "",
- "iyellow": "",
- "iblue": "",
- "ipurple": "",
- "icyan": "",
- "iwhite": "",
- "biblack": "",
- "bired": "",
- "bigreen": "",
- "biyellow": "",
- "biblue": "",
- "bipurple": "",
- "bicyan": "",
- "biwhite": ""
- }
- ],
- "meta": {
- "nf-test": "0.8.4",
- "nextflow": "23.10.1"
- },
- "timestamp": "2024-02-28T12:03:17.969323"
- },
- "Test Function with logColours": {
- "content": [
- {
- "reset": "\u001b[0m",
- "bold": "\u001b[1m",
- "dim": "\u001b[2m",
- "underlined": "\u001b[4m",
- "blink": "\u001b[5m",
- "reverse": "\u001b[7m",
- "hidden": "\u001b[8m",
- "black": "\u001b[0;30m",
- "red": "\u001b[0;31m",
- "green": "\u001b[0;32m",
- "yellow": "\u001b[0;33m",
- "blue": "\u001b[0;34m",
- "purple": "\u001b[0;35m",
- "cyan": "\u001b[0;36m",
- "white": "\u001b[0;37m",
- "bblack": "\u001b[1;30m",
- "bred": "\u001b[1;31m",
- "bgreen": "\u001b[1;32m",
- "byellow": "\u001b[1;33m",
- "bblue": "\u001b[1;34m",
- "bpurple": "\u001b[1;35m",
- "bcyan": "\u001b[1;36m",
- "bwhite": "\u001b[1;37m",
- "ublack": "\u001b[4;30m",
- "ured": "\u001b[4;31m",
- "ugreen": "\u001b[4;32m",
- "uyellow": "\u001b[4;33m",
- "ublue": "\u001b[4;34m",
- "upurple": "\u001b[4;35m",
- "ucyan": "\u001b[4;36m",
- "uwhite": "\u001b[4;37m",
- "iblack": "\u001b[0;90m",
- "ired": "\u001b[0;91m",
- "igreen": "\u001b[0;92m",
- "iyellow": "\u001b[0;93m",
- "iblue": "\u001b[0;94m",
- "ipurple": "\u001b[0;95m",
- "icyan": "\u001b[0;96m",
- "iwhite": "\u001b[0;97m",
- "biblack": "\u001b[1;90m",
- "bired": "\u001b[1;91m",
- "bigreen": "\u001b[1;92m",
- "biyellow": "\u001b[1;93m",
- "biblue": "\u001b[1;94m",
- "bipurple": "\u001b[1;95m",
- "bicyan": "\u001b[1;96m",
- "biwhite": "\u001b[1;97m"
- }
- ],
- "meta": {
- "nf-test": "0.8.4",
- "nextflow": "23.10.1"
- },
- "timestamp": "2024-02-28T12:03:21.714424"
- }
-}
\ No newline at end of file
diff --git a/subworkflows/nf-core/utils_nfcore_pipeline/tests/main.workflow.nf.test b/subworkflows/nf-core/utils_nfcore_pipeline/tests/main.workflow.nf.test
deleted file mode 100644
index 8940d32d..00000000
--- a/subworkflows/nf-core/utils_nfcore_pipeline/tests/main.workflow.nf.test
+++ /dev/null
@@ -1,29 +0,0 @@
-nextflow_workflow {
-
- name "Test Workflow UTILS_NFCORE_PIPELINE"
- script "../main.nf"
- config "subworkflows/nf-core/utils_nfcore_pipeline/tests/nextflow.config"
- workflow "UTILS_NFCORE_PIPELINE"
- tag "subworkflows"
- tag "subworkflows_nfcore"
- tag "utils_nfcore_pipeline"
- tag "subworkflows/utils_nfcore_pipeline"
-
- test("Should run without failures") {
-
- when {
- workflow {
- """
- input[0] = []
- """
- }
- }
-
- then {
- assertAll(
- { assert workflow.success },
- { assert snapshot(workflow.out).match() }
- )
- }
- }
-}
diff --git a/subworkflows/nf-core/utils_nfcore_pipeline/tests/main.workflow.nf.test.snap b/subworkflows/nf-core/utils_nfcore_pipeline/tests/main.workflow.nf.test.snap
deleted file mode 100644
index 859d1030..00000000
--- a/subworkflows/nf-core/utils_nfcore_pipeline/tests/main.workflow.nf.test.snap
+++ /dev/null
@@ -1,19 +0,0 @@
-{
- "Should run without failures": {
- "content": [
- {
- "0": [
- true
- ],
- "valid_config": [
- true
- ]
- }
- ],
- "meta": {
- "nf-test": "0.8.4",
- "nextflow": "23.10.1"
- },
- "timestamp": "2024-02-28T12:03:25.726491"
- }
-}
\ No newline at end of file
diff --git a/subworkflows/nf-core/utils_nfcore_pipeline/tests/nextflow.config b/subworkflows/nf-core/utils_nfcore_pipeline/tests/nextflow.config
deleted file mode 100644
index d0a926bf..00000000
--- a/subworkflows/nf-core/utils_nfcore_pipeline/tests/nextflow.config
+++ /dev/null
@@ -1,9 +0,0 @@
-manifest {
- name = 'nextflow_workflow'
- author = """nf-core"""
- homePage = 'https://127.0.0.1'
- description = """Dummy pipeline"""
- nextflowVersion = '!>=23.04.0'
- version = '9.9.9'
- doi = 'https://doi.org/10.5281/zenodo.5070524'
-}
diff --git a/subworkflows/nf-core/utils_nfschema_plugin/main.nf b/subworkflows/nf-core/utils_nfschema_plugin/main.nf
deleted file mode 100644
index 4994303e..00000000
--- a/subworkflows/nf-core/utils_nfschema_plugin/main.nf
+++ /dev/null
@@ -1,46 +0,0 @@
-//
-// Subworkflow that uses the nf-schema plugin to validate parameters and render the parameter summary
-//
-
-include { paramsSummaryLog } from 'plugin/nf-schema'
-include { validateParameters } from 'plugin/nf-schema'
-
-workflow UTILS_NFSCHEMA_PLUGIN {
-
- take:
- input_workflow // workflow: the workflow object used by nf-schema to get metadata from the workflow
- validate_params // boolean: validate the parameters
- parameters_schema // string: path to the parameters JSON schema.
- // this has to be the same as the schema given to `validation.parametersSchema`
- // when this input is empty it will automatically use the configured schema or
- // "${projectDir}/nextflow_schema.json" as default. This input should not be empty
- // for meta pipelines
-
- main:
-
- //
- // Print parameter summary to stdout. This will display the parameters
- // that differ from the default given in the JSON schema
- //
- if(parameters_schema) {
- log.info paramsSummaryLog(input_workflow, parameters_schema:parameters_schema)
- } else {
- log.info paramsSummaryLog(input_workflow)
- }
-
- //
- // Validate the parameters using nextflow_schema.json or the schema
- // given via the validation.parametersSchema configuration option
- //
- if(validate_params) {
- if(parameters_schema) {
- validateParameters(parameters_schema:parameters_schema)
- } else {
- validateParameters()
- }
- }
-
- emit:
- dummy_emit = true
-}
-
diff --git a/subworkflows/nf-core/utils_nfschema_plugin/meta.yml b/subworkflows/nf-core/utils_nfschema_plugin/meta.yml
deleted file mode 100644
index f7d9f028..00000000
--- a/subworkflows/nf-core/utils_nfschema_plugin/meta.yml
+++ /dev/null
@@ -1,35 +0,0 @@
-# yaml-language-server: $schema=https://raw.githubusercontent.com/nf-core/modules/master/subworkflows/yaml-schema.json
-name: "utils_nfschema_plugin"
-description: Run nf-schema to validate parameters and create a summary of changed parameters
-keywords:
- - validation
- - JSON schema
- - plugin
- - parameters
- - summary
-components: []
-input:
- - input_workflow:
- type: object
- description: |
- The workflow object of the used pipeline.
- This object contains meta data used to create the params summary log
- - validate_params:
- type: boolean
- description: Validate the parameters and error if invalid.
- - parameters_schema:
- type: string
- description: |
- Path to the parameters JSON schema.
- This has to be the same as the schema given to the `validation.parametersSchema` config
- option. When this input is empty it will automatically use the configured schema or
- "${projectDir}/nextflow_schema.json" as default. The schema should not be given in this way
- for meta pipelines.
-output:
- - dummy_emit:
- type: boolean
- description: Dummy emit to make nf-core subworkflows lint happy
-authors:
- - "@nvnieuwk"
-maintainers:
- - "@nvnieuwk"
diff --git a/subworkflows/nf-core/utils_nfschema_plugin/tests/main.nf.test b/subworkflows/nf-core/utils_nfschema_plugin/tests/main.nf.test
deleted file mode 100644
index 8fb30164..00000000
--- a/subworkflows/nf-core/utils_nfschema_plugin/tests/main.nf.test
+++ /dev/null
@@ -1,117 +0,0 @@
-nextflow_workflow {
-
- name "Test Subworkflow UTILS_NFSCHEMA_PLUGIN"
- script "../main.nf"
- workflow "UTILS_NFSCHEMA_PLUGIN"
-
- tag "subworkflows"
- tag "subworkflows_nfcore"
- tag "subworkflows/utils_nfschema_plugin"
- tag "plugin/nf-schema"
-
- config "./nextflow.config"
-
- test("Should run nothing") {
-
- when {
-
- params {
- test_data = ''
- }
-
- workflow {
- """
- validate_params = false
- input[0] = workflow
- input[1] = validate_params
- input[2] = ""
- """
- }
- }
-
- then {
- assertAll(
- { assert workflow.success }
- )
- }
- }
-
- test("Should validate params") {
-
- when {
-
- params {
- test_data = ''
- outdir = null
- }
-
- workflow {
- """
- validate_params = true
- input[0] = workflow
- input[1] = validate_params
- input[2] = ""
- """
- }
- }
-
- then {
- assertAll(
- { assert workflow.failed },
- { assert workflow.stdout.any { it.contains('ERROR ~ Validation of pipeline parameters failed!') } }
- )
- }
- }
-
- test("Should run nothing - custom schema") {
-
- when {
-
- params {
- test_data = ''
- }
-
- workflow {
- """
- validate_params = false
- input[0] = workflow
- input[1] = validate_params
- input[2] = "${projectDir}/subworkflows/nf-core/utils_nfschema_plugin/tests/nextflow_schema.json"
- """
- }
- }
-
- then {
- assertAll(
- { assert workflow.success }
- )
- }
- }
-
- test("Should validate params - custom schema") {
-
- when {
-
- params {
- test_data = ''
- outdir = null
- }
-
- workflow {
- """
- validate_params = true
- input[0] = workflow
- input[1] = validate_params
- input[2] = "${projectDir}/subworkflows/nf-core/utils_nfschema_plugin/tests/nextflow_schema.json"
- """
- }
- }
-
- then {
- assertAll(
- { assert workflow.failed },
- { assert workflow.stdout.any { it.contains('ERROR ~ Validation of pipeline parameters failed!') } }
- )
- }
- }
-}
diff --git a/subworkflows/nf-core/utils_nfschema_plugin/tests/nextflow.config b/subworkflows/nf-core/utils_nfschema_plugin/tests/nextflow.config
deleted file mode 100644
index 0907ac58..00000000
--- a/subworkflows/nf-core/utils_nfschema_plugin/tests/nextflow.config
+++ /dev/null
@@ -1,8 +0,0 @@
-plugins {
- id "nf-schema@2.1.0"
-}
-
-validation {
- parametersSchema = "${projectDir}/subworkflows/nf-core/utils_nfschema_plugin/tests/nextflow_schema.json"
- monochromeLogs = true
-}
\ No newline at end of file
diff --git a/subworkflows/nf-core/utils_nfschema_plugin/tests/nextflow_schema.json b/subworkflows/nf-core/utils_nfschema_plugin/tests/nextflow_schema.json
deleted file mode 100644
index 331e0d2f..00000000
--- a/subworkflows/nf-core/utils_nfschema_plugin/tests/nextflow_schema.json
+++ /dev/null
@@ -1,96 +0,0 @@
-{
- "$schema": "https://json-schema.org/draft/2020-12/schema",
- "$id": "https://raw.githubusercontent.com/./master/nextflow_schema.json",
- "title": ". pipeline parameters",
- "description": "",
- "type": "object",
- "$defs": {
- "input_output_options": {
- "title": "Input/output options",
- "type": "object",
- "fa_icon": "fas fa-terminal",
- "description": "Define where the pipeline should find input data and save output data.",
- "required": ["outdir"],
- "properties": {
- "validate_params": {
- "type": "boolean",
- "description": "Validate parameters?",
- "default": true,
- "hidden": true
- },
- "outdir": {
- "type": "string",
- "format": "directory-path",
- "description": "The output directory where the results will be saved. You have to use absolute paths to storage on Cloud infrastructure.",
- "fa_icon": "fas fa-folder-open"
- },
- "test_data_base": {
- "type": "string",
- "default": "https://raw.githubusercontent.com/nf-core/test-datasets/modules",
- "description": "Base for test data directory",
- "hidden": true
- },
- "test_data": {
- "type": "string",
- "description": "Fake test data param",
- "hidden": true
- }
- }
- },
- "generic_options": {
- "title": "Generic options",
- "type": "object",
- "fa_icon": "fas fa-file-import",
- "description": "Less common options for the pipeline, typically set in a config file.",
- "help_text": "These options are common to all nf-core pipelines and allow you to customise some of the core preferences for how the pipeline runs.\n\nTypically these options would be set in a Nextflow config file loaded for all pipeline runs, such as `~/.nextflow/config`.",
- "properties": {
- "help": {
- "type": "boolean",
- "description": "Display help text.",
- "fa_icon": "fas fa-question-circle",
- "hidden": true
- },
- "version": {
- "type": "boolean",
- "description": "Display version and exit.",
- "fa_icon": "fas fa-question-circle",
- "hidden": true
- },
- "logo": {
- "type": "boolean",
- "default": true,
- "description": "Display nf-core logo in console output.",
- "fa_icon": "fas fa-image",
- "hidden": true
- },
- "singularity_pull_docker_container": {
- "type": "boolean",
- "description": "Pull Singularity container from Docker?",
- "hidden": true
- },
- "publish_dir_mode": {
- "type": "string",
- "default": "copy",
- "description": "Method used to save pipeline results to output directory.",
- "help_text": "The Nextflow `publishDir` option specifies which intermediate files should be saved to the output directory. This option tells the pipeline what method should be used to move these files. See [Nextflow docs](https://www.nextflow.io/docs/latest/process.html#publishdir) for details.",
- "fa_icon": "fas fa-copy",
- "enum": ["symlink", "rellink", "link", "copy", "copyNoFollow", "move"],
- "hidden": true
- },
- "monochrome_logs": {
- "type": "boolean",
- "description": "Use monochrome_logs",
- "hidden": true
- }
- }
- }
- },
- "allOf": [
- {
- "$ref": "#/$defs/input_output_options"
- },
- {
- "$ref": "#/$defs/generic_options"
- }
- ]
-}
diff --git a/workflows/sra/main.nf b/workflows/sra/main.nf
index 0c8cac0c..bee6896d 100644
--- a/workflows/sra/main.nf
+++ b/workflows/sra/main.nf
@@ -10,7 +10,7 @@ include { SRA_IDS_TO_RUNINFO } from '../../modules/local/sra_ids_to_runinfo
include { SRA_RUNINFO_TO_FTP } from '../../modules/local/sra_runinfo_to_ftp'
include { ASPERA_CLI } from '../../modules/local/aspera_cli'
include { SRA_TO_SAMPLESHEET } from '../../modules/local/sra_to_samplesheet'
-include { softwareVersionsToYAML } from '../../subworkflows/nf-core/utils_nfcore_pipeline'
+// TODO include { softwareVersionsToYAML } from 'plugin/nf-utils'
/*
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
@@ -194,3 +194,49 @@ workflow SRA {
THE END
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
*/
+
+//
+// Generate workflow version string
+// FIXME Move this to nf-utils
+//
+def getWorkflowVersion() {
+ def version_string = "" as String
+ if (workflow.manifest.version) {
+ def prefix_v = workflow.manifest.version[0] != 'v' ? 'v' : ''
+ version_string += "${prefix_v}${workflow.manifest.version}"
+ }
+
+ if (workflow.commitId) {
+ def git_shortsha = workflow.commitId.substring(0, 7)
+ version_string += "-g${git_shortsha}"
+ }
+
+ return version_string
+}
+
+//
+// Get software versions for pipeline
+//
+def processVersionsFromYAML(yaml_file) {
+ def yaml = new org.yaml.snakeyaml.Yaml()
+ def versions = yaml.load(yaml_file).collectEntries { k, v -> [k.tokenize(':')[-1], v] }
+ return yaml.dumpAsMap(versions).trim()
+}
+
+//
+// Get workflow version for pipeline
+//
+def workflowVersionToYAML() {
+ return """
+ Workflow:
+ ${workflow.manifest.name}: ${getWorkflowVersion()}
+ Nextflow: ${workflow.nextflow.version}
+ """.stripIndent().trim()
+}
+
+//
+// Get channel of software versions used in pipeline in YAML format
+//
+def softwareVersionsToYAML(ch_versions) {
+ return ch_versions.unique().map { version -> processVersionsFromYAML(version) }.unique().mix(Channel.of(workflowVersionToYAML()))
+}