diff --git a/nf_core/pipeline-template/main.nf b/nf_core/pipeline-template/main.nf index 77797f41a3..d7bbaa8025 100644 --- a/nf_core/pipeline-template/main.nf +++ b/nf_core/pipeline-template/main.nf @@ -23,6 +23,22 @@ include { PIPELINE_INITIALISATION } from './subworkflows/local/utils_nfcore_{{ s include { PIPELINE_COMPLETION } from './subworkflows/local/utils_nfcore_{{ short_name }}_pipeline' {%- if igenomes %} include { getGenomeAttribute } from './subworkflows/local/utils_nfcore_{{ short_name }}_pipeline' +{%- endif %} + +// Import nf-core-utils plugin functions +include { getWorkflowVersion } from 'plugin/nf-core-utils' +include { dumpParametersToJSON } from 'plugin/nf-core-utils' +include { checkCondaChannels } from 'plugin/nf-core-utils' +{%- if email %} +include { completionEmail } from 'plugin/nf-core-utils' +{%- endif %} +include { completionSummary } from 'plugin/nf-core-utils' +{%- if adaptivecard or slackreport %} +include { imNotification } from 'plugin/nf-core-utils' +{%- endif %} +include { checkConfigProvided } from 'plugin/nf-core-utils' +include { checkProfileProvided } from 'plugin/nf-core-utils' +{%- if igenomes %} /* ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ diff --git a/nf_core/pipeline-template/modules.json b/nf_core/pipeline-template/modules.json index 8ae5c19c7e..eb1a237651 100644 --- a/nf_core/pipeline-template/modules.json +++ b/nf_core/pipeline-template/modules.json @@ -13,30 +13,14 @@ }{% endif %}{%- if multiqc %}{% if fastqc %},{% endif %} "multiqc": { "branch": "master", - "git_sha": "e10b76ca0c66213581bec2833e30d31f239dec0b", + "git_sha": "c9a31c472ef2d86802eb44f27322955849859361", "installed_by": ["modules"] } {%- endif %} } }, "subworkflows": { - "nf-core": { - "utils_nextflow_pipeline": { - "branch": "master", - "git_sha": "05954dab2ff481bcb999f24455da29a5828af08d", - "installed_by": ["subworkflows"] - }, - "utils_nfcore_pipeline": { - "branch": "master", - "git_sha": "05954dab2ff481bcb999f24455da29a5828af08d", - "installed_by": ["subworkflows"] - }{% if nf_schema %}, - "utils_nfschema_plugin": { - "branch": "master", - "git_sha": "4b406a74dc0449c0401ed87d5bfff4252fd277fd", - "installed_by": ["subworkflows"] - }{% endif %} - } + "nf-core": {} } } } diff --git a/nf_core/pipeline-template/nextflow.config b/nf_core/pipeline-template/nextflow.config index ccd9a797cb..4f883757a9 100644 --- a/nf_core/pipeline-template/nextflow.config +++ b/nf_core/pipeline-template/nextflow.config @@ -322,6 +322,7 @@ manifest { // Nextflow plugins plugins { id 'nf-schema@2.5.1' // Validation of pipeline parameters and creation of an input channel from a sample sheet + id 'nf-core-utils@0.3.0' // Utility functions for nf-core pipelines } validation { diff --git a/nf_core/pipeline-template/subworkflows/local/utils_nfcore_pipeline_pipeline/main.nf b/nf_core/pipeline-template/subworkflows/local/utils_nfcore_pipeline_pipeline/main.nf index e6c485d1eb..47960573ff 100644 --- a/nf_core/pipeline-template/subworkflows/local/utils_nfcore_pipeline_pipeline/main.nf +++ b/nf_core/pipeline-template/subworkflows/local/utils_nfcore_pipeline_pipeline/main.nf @@ -8,19 +8,25 @@ ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ */ -{% if nf_schema %}include { UTILS_NFSCHEMA_PLUGIN } from '../../nf-core/utils_nfschema_plugin' +{% if nf_schema %}include { paramsSummaryLog } from 'plugin/nf-schema' +include { validateParameters } from 'plugin/nf-schema' include { paramsSummaryMap } from 'plugin/nf-schema' include { 
samplesheetToList } from 'plugin/nf-schema' include { paramsHelp } from 'plugin/nf-schema'{% endif %} {%- if email %} -include { completionEmail } from '../../nf-core/utils_nfcore_pipeline' +include { completionEmail } from 'plugin/nf-core-utils' {%- endif %} -include { completionSummary } from '../../nf-core/utils_nfcore_pipeline' +include { completionSummary } from 'plugin/nf-core-utils' {%- if adaptivecard or slackreport %} -include { imNotification } from '../../nf-core/utils_nfcore_pipeline' +include { imNotification } from 'plugin/nf-core-utils' {%- endif %} -include { UTILS_NFCORE_PIPELINE } from '../../nf-core/utils_nfcore_pipeline' -include { UTILS_NEXTFLOW_PIPELINE } from '../../nf-core/utils_nextflow_pipeline' +include { checkConfigProvided } from 'plugin/nf-core-utils' +include { checkProfileProvided } from 'plugin/nf-core-utils' +include { getWorkflowVersion } from 'plugin/nf-core-utils' +include { dumpParametersToJSON } from 'plugin/nf-core-utils' +include { checkCondaChannels } from 'plugin/nf-core-utils' +include { processVersionsFromFile } from 'plugin/nf-core-utils' +include { workflowVersionToChannel } from 'plugin/nf-core-utils' /* ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ @@ -48,58 +54,36 @@ workflow PIPELINE_INITIALISATION { // // Print version and exit if required and dump pipeline parameters to JSON file // - UTILS_NEXTFLOW_PIPELINE ( - version, - true, - outdir, - workflow.profile.tokenize(',').intersect(['conda', 'mamba']).size() >= 1 - ) + if (version) { + log.info("${workflow.manifest.name} ${getWorkflowVersion(workflow.manifest.version, workflow.commitId)}") + System.exit(0) + } + + if (outdir) { + dumpParametersToJSON(outdir, params) + } + + if (workflow.profile.tokenize(',').intersect(['conda', 'mamba']).size() >= 1) { + checkCondaChannels() + } {%- if nf_schema %} // // Validate parameters and generate parameter summary to stdout // + log.info paramsSummaryLog(workflow) - {%- if is_nfcore %} - before_text = """ --\033[2m----------------------------------------------------\033[0m- - \033[0;32m,--.\033[0;30m/\033[0;32m,-.\033[0m -\033[0;34m ___ __ __ __ ___ \033[0;32m/,-._.--~\'\033[0m -\033[0;34m |\\ | |__ __ / ` / \\ |__) |__ \033[0;33m} {\033[0m -\033[0;34m | \\| | \\__, \\__/ | \\ |___ \033[0;32m\\`-._,-`-,\033[0m - \033[0;32m`._,._,\'\033[0m -\033[0;35m {{ name }} ${workflow.manifest.version}\033[0m --\033[2m----------------------------------------------------\033[0m- -""" - after_text = """${workflow.manifest.doi ? "\n* The pipeline\n" : ""}${workflow.manifest.doi.tokenize(",").collect { " https://doi.org/${it.trim().replace('https://doi.org/','')}"}.join("\n")}${workflow.manifest.doi ? 
"\n" : ""} -* The nf-core framework - https://doi.org/10.1038/s41587-020-0439-x - -* Software dependencies - https://github.com/{{ name }}/blob/{{ default_branch }}/CITATIONS.md -"""{% endif %} - command = "nextflow run ${workflow.manifest.name} -profile --input samplesheet.csv --outdir " - - UTILS_NFSCHEMA_PLUGIN ( - workflow, - validate_params, - null, - help, - help_full, - show_hidden, - {% if is_nfcore -%}before_text{%- else %}""{%- endif %}, - {% if is_nfcore -%}after_text{%- else %}""{%- endif %}, - command - ) + if (validate_params) { + validateParameters() + } {%- endif %} // // Check config provided to the pipeline // - UTILS_NFCORE_PIPELINE ( - nextflow_cli_args - ) + checkConfigProvided() + checkProfileProvided(nextflow_cli_args, monochrome_logs) {%- if igenomes %} @@ -188,7 +172,7 @@ workflow PIPELINE_COMPLETION { plaintext_email, outdir, monochrome_logs, - {% if multiqc %}multiqc_reports.getVal(),{% else %}[]{% endif %} + {% if multiqc %}multiqc_reports.getVal(){% else %}[]{% endif %} ) } {%- endif %} @@ -213,6 +197,57 @@ workflow PIPELINE_COMPLETION { ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ */ +// +// Get channel of software versions used in pipeline in YAML format +// +def softwareVersionsToYAML(ch_versions) { + return ch_versions.unique() + .map { version -> processVersionsFromFile([version.toString()]) } + .unique() + .mix(Channel.fromList(workflowVersionToChannel(workflow.session)).map { it -> + """ + Workflow: + ${it[1]}: ${it[2]} + """.stripIndent().trim() + }) +} + +{%- if multiqc %} +// +// Get workflow summary for MultiQC +// +def paramsSummaryMultiqc(summary_params) { + def summary_section = '' + summary_params + .keySet() + .each { group -> + def group_params = summary_params.get(group) + // This gets the parameters of that particular group + if (group_params) { + summary_section += "

    <p style=\"font-size:110%\"><b>${group}</b></p>\n" + summary_section += "    <dl class=\"dl-horizontal\">\n" + group_params + .keySet() + .sort() + .each { param -> + summary_section += "        <dt>${param}</dt><dd><samp>${group_params.get(param) ?: '<span style=\"color:#999999;\">N/A</a>'}</samp></dd>\n" + } + summary_section += "    </dl>
\n" + } + } + + def yaml_file_text = "id: '${workflow.manifest.name.replace('/', '-')}-summary'\n" as String + yaml_file_text += "description: ' - this information is collected when the pipeline is started.'\n" + yaml_file_text += "section_name: '${workflow.manifest.name} Workflow Summary'\n" + yaml_file_text += "section_href: 'https://github.com/${workflow.manifest.name}'\n" + yaml_file_text += "plot_type: 'html'\n" + yaml_file_text += "data: |\n" + yaml_file_text += "${summary_section}" + + return yaml_file_text +} +{%- endif %} + {%- if igenomes %} // // Check and validate pipeline parameters diff --git a/nf_core/pipeline-template/subworkflows/nf-core/utils_nextflow_pipeline/main.nf b/nf_core/pipeline-template/subworkflows/nf-core/utils_nextflow_pipeline/main.nf deleted file mode 100644 index d6e593e852..0000000000 --- a/nf_core/pipeline-template/subworkflows/nf-core/utils_nextflow_pipeline/main.nf +++ /dev/null @@ -1,126 +0,0 @@ -// -// Subworkflow with functionality that may be useful for any Nextflow pipeline -// - -/* -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - SUBWORKFLOW DEFINITION -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -*/ - -workflow UTILS_NEXTFLOW_PIPELINE { - take: - print_version // boolean: print version - dump_parameters // boolean: dump parameters - outdir // path: base directory used to publish pipeline results - check_conda_channels // boolean: check conda channels - - main: - - // - // Print workflow version and exit on --version - // - if (print_version) { - log.info("${workflow.manifest.name} ${getWorkflowVersion()}") - System.exit(0) - } - - // - // Dump pipeline parameters to a JSON file - // - if (dump_parameters && outdir) { - dumpParametersToJSON(outdir) - } - - // - // When running with Conda, warn if channels have not been set-up appropriately - // - if (check_conda_channels) { - checkCondaChannels() - } - - emit: - dummy_emit = true -} - -/* -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - FUNCTIONS -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -*/ - -// -// Generate version string -// -def getWorkflowVersion() { - def version_string = "" as String - if (workflow.manifest.version) { - def prefix_v = workflow.manifest.version[0] != 'v' ? 
'v' : '' - version_string += "${prefix_v}${workflow.manifest.version}" - } - - if (workflow.commitId) { - def git_shortsha = workflow.commitId.substring(0, 7) - version_string += "-g${git_shortsha}" - } - - return version_string -} - -// -// Dump pipeline parameters to a JSON file -// -def dumpParametersToJSON(outdir) { - def timestamp = new java.util.Date().format('yyyy-MM-dd_HH-mm-ss') - def filename = "params_${timestamp}.json" - def temp_pf = new File(workflow.launchDir.toString(), ".${filename}") - def jsonStr = groovy.json.JsonOutput.toJson(params) - temp_pf.text = groovy.json.JsonOutput.prettyPrint(jsonStr) - - nextflow.extension.FilesEx.copyTo(temp_pf.toPath(), "${outdir}/pipeline_info/params_${timestamp}.json") - temp_pf.delete() -} - -// -// When running with -profile conda, warn if channels have not been set-up appropriately -// -def checkCondaChannels() { - def parser = new org.yaml.snakeyaml.Yaml() - def channels = [] - try { - def config = parser.load("conda config --show channels".execute().text) - channels = config.channels - } - catch (NullPointerException e) { - log.debug(e) - log.warn("Could not verify conda channel configuration.") - return null - } - catch (IOException e) { - log.debug(e) - log.warn("Could not verify conda channel configuration.") - return null - } - - // Check that all channels are present - // This channel list is ordered by required channel priority. - def required_channels_in_order = ['conda-forge', 'bioconda'] - def channels_missing = ((required_channels_in_order as Set) - (channels as Set)) as Boolean - - // Check that they are in the right order - def channel_priority_violation = required_channels_in_order != channels.findAll { ch -> ch in required_channels_in_order } - - if (channels_missing | channel_priority_violation) { - log.warn """\ - ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - There is a problem with your Conda configuration! - You will need to set-up the conda-forge and bioconda channels correctly. - Please refer to https://bioconda.github.io/ - The observed channel order is - ${channels} - but the following channel order is required: - ${required_channels_in_order} - ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~" - """.stripIndent(true) - } -} diff --git a/nf_core/pipeline-template/subworkflows/nf-core/utils_nextflow_pipeline/meta.yml b/nf_core/pipeline-template/subworkflows/nf-core/utils_nextflow_pipeline/meta.yml deleted file mode 100644 index e5c3a0a828..0000000000 --- a/nf_core/pipeline-template/subworkflows/nf-core/utils_nextflow_pipeline/meta.yml +++ /dev/null @@ -1,38 +0,0 @@ -# yaml-language-server: $schema=https://raw.githubusercontent.com/nf-core/modules/master/subworkflows/yaml-schema.json -name: "UTILS_NEXTFLOW_PIPELINE" -description: Subworkflow with functionality that may be useful for any Nextflow pipeline -keywords: - - utility - - pipeline - - initialise - - version -components: [] -input: - - print_version: - type: boolean - description: | - Print the version of the pipeline and exit - - dump_parameters: - type: boolean - description: | - Dump the parameters of the pipeline to a JSON file - - output_directory: - type: directory - description: Path to output dir to write JSON file to. - pattern: "results/" - - check_conda_channel: - type: boolean - description: | - Check if the conda channel priority is correct. 
-output: - - dummy_emit: - type: boolean - description: | - Dummy emit to make nf-core subworkflows lint happy -authors: - - "@adamrtalbot" - - "@drpatelh" -maintainers: - - "@adamrtalbot" - - "@drpatelh" - - "@maxulysse" diff --git a/nf_core/pipeline-template/subworkflows/nf-core/utils_nextflow_pipeline/tests/main.function.nf.test b/nf_core/pipeline-template/subworkflows/nf-core/utils_nextflow_pipeline/tests/main.function.nf.test deleted file mode 100644 index 68718e4f59..0000000000 --- a/nf_core/pipeline-template/subworkflows/nf-core/utils_nextflow_pipeline/tests/main.function.nf.test +++ /dev/null @@ -1,54 +0,0 @@ - -nextflow_function { - - name "Test Functions" - script "subworkflows/nf-core/utils_nextflow_pipeline/main.nf" - config "subworkflows/nf-core/utils_nextflow_pipeline/tests/nextflow.config" - tag 'subworkflows' - tag 'utils_nextflow_pipeline' - tag 'subworkflows/utils_nextflow_pipeline' - - test("Test Function getWorkflowVersion") { - - function "getWorkflowVersion" - - then { - assertAll( - { assert function.success }, - { assert snapshot(function.result).match() } - ) - } - } - - test("Test Function dumpParametersToJSON") { - - function "dumpParametersToJSON" - - when { - function { - """ - // define inputs of the function here. Example: - input[0] = "$outputDir" - """.stripIndent() - } - } - - then { - assertAll( - { assert function.success } - ) - } - } - - test("Test Function checkCondaChannels") { - - function "checkCondaChannels" - - then { - assertAll( - { assert function.success }, - { assert snapshot(function.result).match() } - ) - } - } -} diff --git a/nf_core/pipeline-template/subworkflows/nf-core/utils_nextflow_pipeline/tests/main.function.nf.test.snap b/nf_core/pipeline-template/subworkflows/nf-core/utils_nextflow_pipeline/tests/main.function.nf.test.snap deleted file mode 100644 index e3f0baf473..0000000000 --- a/nf_core/pipeline-template/subworkflows/nf-core/utils_nextflow_pipeline/tests/main.function.nf.test.snap +++ /dev/null @@ -1,20 +0,0 @@ -{ - "Test Function getWorkflowVersion": { - "content": [ - "v9.9.9" - ], - "meta": { - "nf-test": "0.8.4", - "nextflow": "23.10.1" - }, - "timestamp": "2024-02-28T12:02:05.308243" - }, - "Test Function checkCondaChannels": { - "content": null, - "meta": { - "nf-test": "0.8.4", - "nextflow": "23.10.1" - }, - "timestamp": "2024-02-28T12:02:12.425833" - } -} \ No newline at end of file diff --git a/nf_core/pipeline-template/subworkflows/nf-core/utils_nextflow_pipeline/tests/main.workflow.nf.test b/nf_core/pipeline-template/subworkflows/nf-core/utils_nextflow_pipeline/tests/main.workflow.nf.test deleted file mode 100644 index 02dbf094cd..0000000000 --- a/nf_core/pipeline-template/subworkflows/nf-core/utils_nextflow_pipeline/tests/main.workflow.nf.test +++ /dev/null @@ -1,113 +0,0 @@ -nextflow_workflow { - - name "Test Workflow UTILS_NEXTFLOW_PIPELINE" - script "../main.nf" - config "subworkflows/nf-core/utils_nextflow_pipeline/tests/nextflow.config" - workflow "UTILS_NEXTFLOW_PIPELINE" - tag 'subworkflows' - tag 'utils_nextflow_pipeline' - tag 'subworkflows/utils_nextflow_pipeline' - - test("Should run no inputs") { - - when { - workflow { - """ - print_version = false - dump_parameters = false - outdir = null - check_conda_channels = false - - input[0] = print_version - input[1] = dump_parameters - input[2] = outdir - input[3] = check_conda_channels - """ - } - } - - then { - assertAll( - { assert workflow.success } - ) - } - } - - test("Should print version") { - - when { - workflow { - """ - print_version = true - 
dump_parameters = false - outdir = null - check_conda_channels = false - - input[0] = print_version - input[1] = dump_parameters - input[2] = outdir - input[3] = check_conda_channels - """ - } - } - - then { - expect { - with(workflow) { - assert success - assert "nextflow_workflow v9.9.9" in stdout - } - } - } - } - - test("Should dump params") { - - when { - workflow { - """ - print_version = false - dump_parameters = true - outdir = 'results' - check_conda_channels = false - - input[0] = false - input[1] = true - input[2] = outdir - input[3] = false - """ - } - } - - then { - assertAll( - { assert workflow.success } - ) - } - } - - test("Should not create params JSON if no output directory") { - - when { - workflow { - """ - print_version = false - dump_parameters = true - outdir = null - check_conda_channels = false - - input[0] = false - input[1] = true - input[2] = outdir - input[3] = false - """ - } - } - - then { - assertAll( - { assert workflow.success } - ) - } - } -} diff --git a/nf_core/pipeline-template/subworkflows/nf-core/utils_nextflow_pipeline/tests/nextflow.config b/nf_core/pipeline-template/subworkflows/nf-core/utils_nextflow_pipeline/tests/nextflow.config deleted file mode 100644 index a09572e5bb..0000000000 --- a/nf_core/pipeline-template/subworkflows/nf-core/utils_nextflow_pipeline/tests/nextflow.config +++ /dev/null @@ -1,9 +0,0 @@ -manifest { - name = 'nextflow_workflow' - author = """nf-core""" - homePage = 'https://127.0.0.1' - description = """Dummy pipeline""" - nextflowVersion = '!>=23.04.0' - version = '9.9.9' - doi = 'https://doi.org/10.5281/zenodo.5070524' -} diff --git a/nf_core/pipeline-template/subworkflows/nf-core/utils_nfcore_pipeline/main.nf b/nf_core/pipeline-template/subworkflows/nf-core/utils_nfcore_pipeline/main.nf deleted file mode 100644 index bfd258760d..0000000000 --- a/nf_core/pipeline-template/subworkflows/nf-core/utils_nfcore_pipeline/main.nf +++ /dev/null @@ -1,419 +0,0 @@ -// -// Subworkflow with utility functions specific to the nf-core pipeline template -// - -/* -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - SUBWORKFLOW DEFINITION -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -*/ - -workflow UTILS_NFCORE_PIPELINE { - take: - nextflow_cli_args - - main: - valid_config = checkConfigProvided() - checkProfileProvided(nextflow_cli_args) - - emit: - valid_config -} - -/* -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - FUNCTIONS -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -*/ - -// -// Warn if a -profile or Nextflow config has not been provided to run the pipeline -// -def checkConfigProvided() { - def valid_config = true as Boolean - if (workflow.profile == 'standard' && workflow.configFiles.size() <= 1) { - log.warn( - "[${workflow.manifest.name}] You are attempting to run the pipeline without any custom configuration!\n\n" + "This will be dependent on your local compute environment but can be achieved via one or more of the following:\n" + " (1) Using an existing pipeline profile e.g. `-profile docker` or `-profile singularity`\n" + " (2) Using an existing nf-core/configs for your Institution e.g. `-profile crick` or `-profile uppmax`\n" + " (3) Using your own local custom config e.g. 
`-c /path/to/your/custom.config`\n\n" + "Please refer to the quick start section and usage docs for the pipeline.\n " - ) - valid_config = false - } - return valid_config -} - -// -// Exit pipeline if --profile contains spaces -// -def checkProfileProvided(nextflow_cli_args) { - if (workflow.profile.endsWith(',')) { - error( - "The `-profile` option cannot end with a trailing comma, please remove it and re-run the pipeline!\n" + "HINT: A common mistake is to provide multiple values separated by spaces e.g. `-profile test, docker`.\n" - ) - } - if (nextflow_cli_args[0]) { - log.warn( - "nf-core pipelines do not accept positional arguments. The positional argument `${nextflow_cli_args[0]}` has been detected.\n" + "HINT: A common mistake is to provide multiple values separated by spaces e.g. `-profile test, docker`.\n" - ) - } -} - -// -// Generate workflow version string -// -def getWorkflowVersion() { - def version_string = "" as String - if (workflow.manifest.version) { - def prefix_v = workflow.manifest.version[0] != 'v' ? 'v' : '' - version_string += "${prefix_v}${workflow.manifest.version}" - } - - if (workflow.commitId) { - def git_shortsha = workflow.commitId.substring(0, 7) - version_string += "-g${git_shortsha}" - } - - return version_string -} - -// -// Get software versions for pipeline -// -def processVersionsFromYAML(yaml_file) { - def yaml = new org.yaml.snakeyaml.Yaml() - def versions = yaml.load(yaml_file).collectEntries { k, v -> [k.tokenize(':')[-1], v] } - return yaml.dumpAsMap(versions).trim() -} - -// -// Get workflow version for pipeline -// -def workflowVersionToYAML() { - return """ - Workflow: - ${workflow.manifest.name}: ${getWorkflowVersion()} - Nextflow: ${workflow.nextflow.version} - """.stripIndent().trim() -} - -// -// Get channel of software versions used in pipeline in YAML format -// -def softwareVersionsToYAML(ch_versions) { - return ch_versions.unique().map { version -> processVersionsFromYAML(version) }.unique().mix(Channel.of(workflowVersionToYAML())) -} - -// -// Get workflow summary for MultiQC -// -def paramsSummaryMultiqc(summary_params) { - def summary_section = '' - summary_params - .keySet() - .each { group -> - def group_params = summary_params.get(group) - // This gets the parameters of that particular group - if (group_params) { - summary_section += "

    <p style=\"font-size:110%\"><b>${group}</b></p>\n" - summary_section += "    <dl class=\"dl-horizontal\">\n" - group_params - .keySet() - .sort() - .each { param -> - summary_section += "        <dt>${param}</dt><dd><samp>${group_params.get(param) ?: '<span style=\"color:#999999;\">N/A</a>'}</samp></dd>\n" - } - summary_section += "    </dl>
\n" - } - } - - def yaml_file_text = "id: '${workflow.manifest.name.replace('/', '-')}-summary'\n" as String - yaml_file_text += "description: ' - this information is collected when the pipeline is started.'\n" - yaml_file_text += "section_name: '${workflow.manifest.name} Workflow Summary'\n" - yaml_file_text += "section_href: 'https://github.com/${workflow.manifest.name}'\n" - yaml_file_text += "plot_type: 'html'\n" - yaml_file_text += "data: |\n" - yaml_file_text += "${summary_section}" - - return yaml_file_text -} - -// -// ANSII colours used for terminal logging -// -def logColours(monochrome_logs=true) { - def colorcodes = [:] as Map - - // Reset / Meta - colorcodes['reset'] = monochrome_logs ? '' : "\033[0m" - colorcodes['bold'] = monochrome_logs ? '' : "\033[1m" - colorcodes['dim'] = monochrome_logs ? '' : "\033[2m" - colorcodes['underlined'] = monochrome_logs ? '' : "\033[4m" - colorcodes['blink'] = monochrome_logs ? '' : "\033[5m" - colorcodes['reverse'] = monochrome_logs ? '' : "\033[7m" - colorcodes['hidden'] = monochrome_logs ? '' : "\033[8m" - - // Regular Colors - colorcodes['black'] = monochrome_logs ? '' : "\033[0;30m" - colorcodes['red'] = monochrome_logs ? '' : "\033[0;31m" - colorcodes['green'] = monochrome_logs ? '' : "\033[0;32m" - colorcodes['yellow'] = monochrome_logs ? '' : "\033[0;33m" - colorcodes['blue'] = monochrome_logs ? '' : "\033[0;34m" - colorcodes['purple'] = monochrome_logs ? '' : "\033[0;35m" - colorcodes['cyan'] = monochrome_logs ? '' : "\033[0;36m" - colorcodes['white'] = monochrome_logs ? '' : "\033[0;37m" - - // Bold - colorcodes['bblack'] = monochrome_logs ? '' : "\033[1;30m" - colorcodes['bred'] = monochrome_logs ? '' : "\033[1;31m" - colorcodes['bgreen'] = monochrome_logs ? '' : "\033[1;32m" - colorcodes['byellow'] = monochrome_logs ? '' : "\033[1;33m" - colorcodes['bblue'] = monochrome_logs ? '' : "\033[1;34m" - colorcodes['bpurple'] = monochrome_logs ? '' : "\033[1;35m" - colorcodes['bcyan'] = monochrome_logs ? '' : "\033[1;36m" - colorcodes['bwhite'] = monochrome_logs ? '' : "\033[1;37m" - - // Underline - colorcodes['ublack'] = monochrome_logs ? '' : "\033[4;30m" - colorcodes['ured'] = monochrome_logs ? '' : "\033[4;31m" - colorcodes['ugreen'] = monochrome_logs ? '' : "\033[4;32m" - colorcodes['uyellow'] = monochrome_logs ? '' : "\033[4;33m" - colorcodes['ublue'] = monochrome_logs ? '' : "\033[4;34m" - colorcodes['upurple'] = monochrome_logs ? '' : "\033[4;35m" - colorcodes['ucyan'] = monochrome_logs ? '' : "\033[4;36m" - colorcodes['uwhite'] = monochrome_logs ? '' : "\033[4;37m" - - // High Intensity - colorcodes['iblack'] = monochrome_logs ? '' : "\033[0;90m" - colorcodes['ired'] = monochrome_logs ? '' : "\033[0;91m" - colorcodes['igreen'] = monochrome_logs ? '' : "\033[0;92m" - colorcodes['iyellow'] = monochrome_logs ? '' : "\033[0;93m" - colorcodes['iblue'] = monochrome_logs ? '' : "\033[0;94m" - colorcodes['ipurple'] = monochrome_logs ? '' : "\033[0;95m" - colorcodes['icyan'] = monochrome_logs ? '' : "\033[0;96m" - colorcodes['iwhite'] = monochrome_logs ? '' : "\033[0;97m" - - // Bold High Intensity - colorcodes['biblack'] = monochrome_logs ? '' : "\033[1;90m" - colorcodes['bired'] = monochrome_logs ? '' : "\033[1;91m" - colorcodes['bigreen'] = monochrome_logs ? '' : "\033[1;92m" - colorcodes['biyellow'] = monochrome_logs ? '' : "\033[1;93m" - colorcodes['biblue'] = monochrome_logs ? '' : "\033[1;94m" - colorcodes['bipurple'] = monochrome_logs ? '' : "\033[1;95m" - colorcodes['bicyan'] = monochrome_logs ? 
'' : "\033[1;96m" - colorcodes['biwhite'] = monochrome_logs ? '' : "\033[1;97m" - - return colorcodes -} - -// Return a single report from an object that may be a Path or List -// -def getSingleReport(multiqc_reports) { - if (multiqc_reports instanceof Path) { - return multiqc_reports - } else if (multiqc_reports instanceof List) { - if (multiqc_reports.size() == 0) { - log.warn("[${workflow.manifest.name}] No reports found from process 'MULTIQC'") - return null - } else if (multiqc_reports.size() == 1) { - return multiqc_reports.first() - } else { - log.warn("[${workflow.manifest.name}] Found multiple reports from process 'MULTIQC', will use only one") - return multiqc_reports.first() - } - } else { - return null - } -} - -// -// Construct and send completion email -// -def completionEmail(summary_params, email, email_on_fail, plaintext_email, outdir, monochrome_logs=true, multiqc_report=null) { - - // Set up the e-mail variables - def subject = "[${workflow.manifest.name}] Successful: ${workflow.runName}" - if (!workflow.success) { - subject = "[${workflow.manifest.name}] FAILED: ${workflow.runName}" - } - - def summary = [:] - summary_params - .keySet() - .sort() - .each { group -> - summary << summary_params[group] - } - - def misc_fields = [:] - misc_fields['Date Started'] = workflow.start - misc_fields['Date Completed'] = workflow.complete - misc_fields['Pipeline script file path'] = workflow.scriptFile - misc_fields['Pipeline script hash ID'] = workflow.scriptId - if (workflow.repository) { - misc_fields['Pipeline repository Git URL'] = workflow.repository - } - if (workflow.commitId) { - misc_fields['Pipeline repository Git Commit'] = workflow.commitId - } - if (workflow.revision) { - misc_fields['Pipeline Git branch/tag'] = workflow.revision - } - misc_fields['Nextflow Version'] = workflow.nextflow.version - misc_fields['Nextflow Build'] = workflow.nextflow.build - misc_fields['Nextflow Compile Timestamp'] = workflow.nextflow.timestamp - - def email_fields = [:] - email_fields['version'] = getWorkflowVersion() - email_fields['runName'] = workflow.runName - email_fields['success'] = workflow.success - email_fields['dateComplete'] = workflow.complete - email_fields['duration'] = workflow.duration - email_fields['exitStatus'] = workflow.exitStatus - email_fields['errorMessage'] = (workflow.errorMessage ?: 'None') - email_fields['errorReport'] = (workflow.errorReport ?: 'None') - email_fields['commandLine'] = workflow.commandLine - email_fields['projectDir'] = workflow.projectDir - email_fields['summary'] = summary << misc_fields - - // On success try attach the multiqc report - def mqc_report = getSingleReport(multiqc_report) - - // Check if we are only sending emails on failure - def email_address = email - if (!email && email_on_fail && !workflow.success) { - email_address = email_on_fail - } - - // Render the TXT template - def engine = new groovy.text.GStringTemplateEngine() - def tf = new File("${workflow.projectDir}/assets/email_template.txt") - def txt_template = engine.createTemplate(tf).make(email_fields) - def email_txt = txt_template.toString() - - // Render the HTML template - def hf = new File("${workflow.projectDir}/assets/email_template.html") - def html_template = engine.createTemplate(hf).make(email_fields) - def email_html = html_template.toString() - - // Render the sendmail template - def max_multiqc_email_size = (params.containsKey('max_multiqc_email_size') ? 
params.max_multiqc_email_size : 0) as MemoryUnit - def smail_fields = [email: email_address, subject: subject, email_txt: email_txt, email_html: email_html, projectDir: "${workflow.projectDir}", mqcFile: mqc_report, mqcMaxSize: max_multiqc_email_size.toBytes()] - def sf = new File("${workflow.projectDir}/assets/sendmail_template.txt") - def sendmail_template = engine.createTemplate(sf).make(smail_fields) - def sendmail_html = sendmail_template.toString() - - // Send the HTML e-mail - def colors = logColours(monochrome_logs) as Map - if (email_address) { - try { - if (plaintext_email) { - new org.codehaus.groovy.GroovyException('Send plaintext e-mail, not HTML') - } - // Try to send HTML e-mail using sendmail - def sendmail_tf = new File(workflow.launchDir.toString(), ".sendmail_tmp.html") - sendmail_tf.withWriter { w -> w << sendmail_html } - ['sendmail', '-t'].execute() << sendmail_html - log.info("-${colors.purple}[${workflow.manifest.name}]${colors.green} Sent summary e-mail to ${email_address} (sendmail)-") - } - catch (Exception msg) { - log.debug(msg.toString()) - log.debug("Trying with mail instead of sendmail") - // Catch failures and try with plaintext - def mail_cmd = ['mail', '-s', subject, '--content-type=text/html', email_address] - mail_cmd.execute() << email_html - log.info("-${colors.purple}[${workflow.manifest.name}]${colors.green} Sent summary e-mail to ${email_address} (mail)-") - } - } - - // Write summary e-mail HTML to a file - def output_hf = new File(workflow.launchDir.toString(), ".pipeline_report.html") - output_hf.withWriter { w -> w << email_html } - nextflow.extension.FilesEx.copyTo(output_hf.toPath(), "${outdir}/pipeline_info/pipeline_report.html") - output_hf.delete() - - // Write summary e-mail TXT to a file - def output_tf = new File(workflow.launchDir.toString(), ".pipeline_report.txt") - output_tf.withWriter { w -> w << email_txt } - nextflow.extension.FilesEx.copyTo(output_tf.toPath(), "${outdir}/pipeline_info/pipeline_report.txt") - output_tf.delete() -} - -// -// Print pipeline summary on completion -// -def completionSummary(monochrome_logs=true) { - def colors = logColours(monochrome_logs) as Map - if (workflow.success) { - if (workflow.stats.ignoredCount == 0) { - log.info("-${colors.purple}[${workflow.manifest.name}]${colors.green} Pipeline completed successfully${colors.reset}-") - } - else { - log.info("-${colors.purple}[${workflow.manifest.name}]${colors.yellow} Pipeline completed successfully, but with errored process(es) ${colors.reset}-") - } - } - else { - log.info("-${colors.purple}[${workflow.manifest.name}]${colors.red} Pipeline completed with errors${colors.reset}-") - } -} - -// -// Construct and send a notification to a web server as JSON e.g. 
Microsoft Teams and Slack -// -def imNotification(summary_params, hook_url) { - def summary = [:] - summary_params - .keySet() - .sort() - .each { group -> - summary << summary_params[group] - } - - def misc_fields = [:] - misc_fields['start'] = workflow.start - misc_fields['complete'] = workflow.complete - misc_fields['scriptfile'] = workflow.scriptFile - misc_fields['scriptid'] = workflow.scriptId - if (workflow.repository) { - misc_fields['repository'] = workflow.repository - } - if (workflow.commitId) { - misc_fields['commitid'] = workflow.commitId - } - if (workflow.revision) { - misc_fields['revision'] = workflow.revision - } - misc_fields['nxf_version'] = workflow.nextflow.version - misc_fields['nxf_build'] = workflow.nextflow.build - misc_fields['nxf_timestamp'] = workflow.nextflow.timestamp - - def msg_fields = [:] - msg_fields['version'] = getWorkflowVersion() - msg_fields['runName'] = workflow.runName - msg_fields['success'] = workflow.success - msg_fields['dateComplete'] = workflow.complete - msg_fields['duration'] = workflow.duration - msg_fields['exitStatus'] = workflow.exitStatus - msg_fields['errorMessage'] = (workflow.errorMessage ?: 'None') - msg_fields['errorReport'] = (workflow.errorReport ?: 'None') - msg_fields['commandLine'] = workflow.commandLine.replaceFirst(/ +--hook_url +[^ ]+/, "") - msg_fields['projectDir'] = workflow.projectDir - msg_fields['summary'] = summary << misc_fields - - // Render the JSON template - def engine = new groovy.text.GStringTemplateEngine() - // Different JSON depending on the service provider - // Defaults to "Adaptive Cards" (https://adaptivecards.io), except Slack which has its own format - def json_path = hook_url.contains("hooks.slack.com") ? "slackreport.json" : "adaptivecard.json" - def hf = new File("${workflow.projectDir}/assets/${json_path}") - def json_template = engine.createTemplate(hf).make(msg_fields) - def json_message = json_template.toString() - - // POST - def post = new URL(hook_url).openConnection() - post.setRequestMethod("POST") - post.setDoOutput(true) - post.setRequestProperty("Content-Type", "application/json") - post.getOutputStream().write(json_message.getBytes("UTF-8")) - def postRC = post.getResponseCode() - if (!postRC.equals(200)) { - log.warn(post.getErrorStream().getText()) - } -} diff --git a/nf_core/pipeline-template/subworkflows/nf-core/utils_nfcore_pipeline/meta.yml b/nf_core/pipeline-template/subworkflows/nf-core/utils_nfcore_pipeline/meta.yml deleted file mode 100644 index d08d24342d..0000000000 --- a/nf_core/pipeline-template/subworkflows/nf-core/utils_nfcore_pipeline/meta.yml +++ /dev/null @@ -1,24 +0,0 @@ -# yaml-language-server: $schema=https://raw.githubusercontent.com/nf-core/modules/master/subworkflows/yaml-schema.json -name: "UTILS_NFCORE_PIPELINE" -description: Subworkflow with utility functions specific to the nf-core pipeline template -keywords: - - utility - - pipeline - - initialise - - version -components: [] -input: - - nextflow_cli_args: - type: list - description: | - Nextflow CLI positional arguments -output: - - success: - type: boolean - description: | - Dummy output to indicate success -authors: - - "@adamrtalbot" -maintainers: - - "@adamrtalbot" - - "@maxulysse" diff --git a/nf_core/pipeline-template/subworkflows/nf-core/utils_nfcore_pipeline/tests/main.function.nf.test b/nf_core/pipeline-template/subworkflows/nf-core/utils_nfcore_pipeline/tests/main.function.nf.test deleted file mode 100644 index f117040cbd..0000000000 --- 
a/nf_core/pipeline-template/subworkflows/nf-core/utils_nfcore_pipeline/tests/main.function.nf.test +++ /dev/null @@ -1,126 +0,0 @@ - -nextflow_function { - - name "Test Functions" - script "../main.nf" - config "subworkflows/nf-core/utils_nfcore_pipeline/tests/nextflow.config" - tag "subworkflows" - tag "subworkflows_nfcore" - tag "utils_nfcore_pipeline" - tag "subworkflows/utils_nfcore_pipeline" - - test("Test Function checkConfigProvided") { - - function "checkConfigProvided" - - then { - assertAll( - { assert function.success }, - { assert snapshot(function.result).match() } - ) - } - } - - test("Test Function checkProfileProvided") { - - function "checkProfileProvided" - - when { - function { - """ - input[0] = [] - """ - } - } - - then { - assertAll( - { assert function.success }, - { assert snapshot(function.result).match() } - ) - } - } - - test("Test Function without logColours") { - - function "logColours" - - when { - function { - """ - input[0] = true - """ - } - } - - then { - assertAll( - { assert function.success }, - { assert snapshot(function.result).match() } - ) - } - } - - test("Test Function with logColours") { - function "logColours" - - when { - function { - """ - input[0] = false - """ - } - } - - then { - assertAll( - { assert function.success }, - { assert snapshot(function.result).match() } - ) - } - } - - test("Test Function getSingleReport with a single file") { - function "getSingleReport" - - when { - function { - """ - input[0] = file(params.modules_testdata_base_path + '/generic/tsv/test.tsv', checkIfExists: true) - """ - } - } - - then { - assertAll( - { assert function.success }, - { assert function.result.contains("test.tsv") } - ) - } - } - - test("Test Function getSingleReport with multiple files") { - function "getSingleReport" - - when { - function { - """ - input[0] = [ - file(params.modules_testdata_base_path + '/generic/tsv/test.tsv', checkIfExists: true), - file(params.modules_testdata_base_path + '/generic/tsv/network.tsv', checkIfExists: true), - file(params.modules_testdata_base_path + '/generic/tsv/expression.tsv', checkIfExists: true) - ] - """ - } - } - - then { - assertAll( - { assert function.success }, - { assert function.result.contains("test.tsv") }, - { assert !function.result.contains("network.tsv") }, - { assert !function.result.contains("expression.tsv") } - ) - } - } -} diff --git a/nf_core/pipeline-template/subworkflows/nf-core/utils_nfcore_pipeline/tests/main.function.nf.test.snap b/nf_core/pipeline-template/subworkflows/nf-core/utils_nfcore_pipeline/tests/main.function.nf.test.snap deleted file mode 100644 index 02c6701413..0000000000 --- a/nf_core/pipeline-template/subworkflows/nf-core/utils_nfcore_pipeline/tests/main.function.nf.test.snap +++ /dev/null @@ -1,136 +0,0 @@ -{ - "Test Function checkProfileProvided": { - "content": null, - "meta": { - "nf-test": "0.8.4", - "nextflow": "23.10.1" - }, - "timestamp": "2024-02-28T12:03:03.360873" - }, - "Test Function checkConfigProvided": { - "content": [ - true - ], - "meta": { - "nf-test": "0.8.4", - "nextflow": "23.10.1" - }, - "timestamp": "2024-02-28T12:02:59.729647" - }, - "Test Function without logColours": { - "content": [ - { - "reset": "", - "bold": "", - "dim": "", - "underlined": "", - "blink": "", - "reverse": "", - "hidden": "", - "black": "", - "red": "", - "green": "", - "yellow": "", - "blue": "", - "purple": "", - "cyan": "", - "white": "", - "bblack": "", - "bred": "", - "bgreen": "", - "byellow": "", - "bblue": "", - "bpurple": "", - "bcyan": "", - "bwhite": "", - 
"ublack": "", - "ured": "", - "ugreen": "", - "uyellow": "", - "ublue": "", - "upurple": "", - "ucyan": "", - "uwhite": "", - "iblack": "", - "ired": "", - "igreen": "", - "iyellow": "", - "iblue": "", - "ipurple": "", - "icyan": "", - "iwhite": "", - "biblack": "", - "bired": "", - "bigreen": "", - "biyellow": "", - "biblue": "", - "bipurple": "", - "bicyan": "", - "biwhite": "" - } - ], - "meta": { - "nf-test": "0.8.4", - "nextflow": "23.10.1" - }, - "timestamp": "2024-02-28T12:03:17.969323" - }, - "Test Function with logColours": { - "content": [ - { - "reset": "\u001b[0m", - "bold": "\u001b[1m", - "dim": "\u001b[2m", - "underlined": "\u001b[4m", - "blink": "\u001b[5m", - "reverse": "\u001b[7m", - "hidden": "\u001b[8m", - "black": "\u001b[0;30m", - "red": "\u001b[0;31m", - "green": "\u001b[0;32m", - "yellow": "\u001b[0;33m", - "blue": "\u001b[0;34m", - "purple": "\u001b[0;35m", - "cyan": "\u001b[0;36m", - "white": "\u001b[0;37m", - "bblack": "\u001b[1;30m", - "bred": "\u001b[1;31m", - "bgreen": "\u001b[1;32m", - "byellow": "\u001b[1;33m", - "bblue": "\u001b[1;34m", - "bpurple": "\u001b[1;35m", - "bcyan": "\u001b[1;36m", - "bwhite": "\u001b[1;37m", - "ublack": "\u001b[4;30m", - "ured": "\u001b[4;31m", - "ugreen": "\u001b[4;32m", - "uyellow": "\u001b[4;33m", - "ublue": "\u001b[4;34m", - "upurple": "\u001b[4;35m", - "ucyan": "\u001b[4;36m", - "uwhite": "\u001b[4;37m", - "iblack": "\u001b[0;90m", - "ired": "\u001b[0;91m", - "igreen": "\u001b[0;92m", - "iyellow": "\u001b[0;93m", - "iblue": "\u001b[0;94m", - "ipurple": "\u001b[0;95m", - "icyan": "\u001b[0;96m", - "iwhite": "\u001b[0;97m", - "biblack": "\u001b[1;90m", - "bired": "\u001b[1;91m", - "bigreen": "\u001b[1;92m", - "biyellow": "\u001b[1;93m", - "biblue": "\u001b[1;94m", - "bipurple": "\u001b[1;95m", - "bicyan": "\u001b[1;96m", - "biwhite": "\u001b[1;97m" - } - ], - "meta": { - "nf-test": "0.8.4", - "nextflow": "23.10.1" - }, - "timestamp": "2024-02-28T12:03:21.714424" - } -} \ No newline at end of file diff --git a/nf_core/pipeline-template/subworkflows/nf-core/utils_nfcore_pipeline/tests/main.workflow.nf.test b/nf_core/pipeline-template/subworkflows/nf-core/utils_nfcore_pipeline/tests/main.workflow.nf.test deleted file mode 100644 index 8940d32d1e..0000000000 --- a/nf_core/pipeline-template/subworkflows/nf-core/utils_nfcore_pipeline/tests/main.workflow.nf.test +++ /dev/null @@ -1,29 +0,0 @@ -nextflow_workflow { - - name "Test Workflow UTILS_NFCORE_PIPELINE" - script "../main.nf" - config "subworkflows/nf-core/utils_nfcore_pipeline/tests/nextflow.config" - workflow "UTILS_NFCORE_PIPELINE" - tag "subworkflows" - tag "subworkflows_nfcore" - tag "utils_nfcore_pipeline" - tag "subworkflows/utils_nfcore_pipeline" - - test("Should run without failures") { - - when { - workflow { - """ - input[0] = [] - """ - } - } - - then { - assertAll( - { assert workflow.success }, - { assert snapshot(workflow.out).match() } - ) - } - } -} diff --git a/nf_core/pipeline-template/subworkflows/nf-core/utils_nfcore_pipeline/tests/main.workflow.nf.test.snap b/nf_core/pipeline-template/subworkflows/nf-core/utils_nfcore_pipeline/tests/main.workflow.nf.test.snap deleted file mode 100644 index 859d1030fb..0000000000 --- a/nf_core/pipeline-template/subworkflows/nf-core/utils_nfcore_pipeline/tests/main.workflow.nf.test.snap +++ /dev/null @@ -1,19 +0,0 @@ -{ - "Should run without failures": { - "content": [ - { - "0": [ - true - ], - "valid_config": [ - true - ] - } - ], - "meta": { - "nf-test": "0.8.4", - "nextflow": "23.10.1" - }, - "timestamp": 
"2024-02-28T12:03:25.726491" - } -} \ No newline at end of file diff --git a/nf_core/pipeline-template/subworkflows/nf-core/utils_nfcore_pipeline/tests/nextflow.config b/nf_core/pipeline-template/subworkflows/nf-core/utils_nfcore_pipeline/tests/nextflow.config deleted file mode 100644 index d0a926bf6d..0000000000 --- a/nf_core/pipeline-template/subworkflows/nf-core/utils_nfcore_pipeline/tests/nextflow.config +++ /dev/null @@ -1,9 +0,0 @@ -manifest { - name = 'nextflow_workflow' - author = """nf-core""" - homePage = 'https://127.0.0.1' - description = """Dummy pipeline""" - nextflowVersion = '!>=23.04.0' - version = '9.9.9' - doi = 'https://doi.org/10.5281/zenodo.5070524' -} diff --git a/nf_core/pipeline-template/subworkflows/nf-core/utils_nfschema_plugin/main.nf b/nf_core/pipeline-template/subworkflows/nf-core/utils_nfschema_plugin/main.nf deleted file mode 100644 index ee4738c8d1..0000000000 --- a/nf_core/pipeline-template/subworkflows/nf-core/utils_nfschema_plugin/main.nf +++ /dev/null @@ -1,74 +0,0 @@ -// -// Subworkflow that uses the nf-schema plugin to validate parameters and render the parameter summary -// - -include { paramsSummaryLog } from 'plugin/nf-schema' -include { validateParameters } from 'plugin/nf-schema' -include { paramsHelp } from 'plugin/nf-schema' - -workflow UTILS_NFSCHEMA_PLUGIN { - - take: - input_workflow // workflow: the workflow object used by nf-schema to get metadata from the workflow - validate_params // boolean: validate the parameters - parameters_schema // string: path to the parameters JSON schema. - // this has to be the same as the schema given to `validation.parametersSchema` - // when this input is empty it will automatically use the configured schema or - // "${projectDir}/nextflow_schema.json" as default. This input should not be empty - // for meta pipelines - help // boolean: show help message - help_full // boolean: show full help message - show_hidden // boolean: show hidden parameters in help message - before_text // string: text to show before the help message and parameters summary - after_text // string: text to show after the help message and parameters summary - command // string: an example command of the pipeline - - main: - - if(help || help_full) { - help_options = [ - beforeText: before_text, - afterText: after_text, - command: command, - showHidden: show_hidden, - fullHelp: help_full, - ] - if(parameters_schema) { - help_options << [parametersSchema: parameters_schema] - } - log.info paramsHelp( - help_options, - params.help instanceof String ? params.help : "", - ) - exit 0 - } - - // - // Print parameter summary to stdout. 
This will display the parameters - // that differ from the default given in the JSON schema - // - - summary_options = [:] - if(parameters_schema) { - summary_options << [parametersSchema: parameters_schema] - } - log.info before_text - log.info paramsSummaryLog(summary_options, input_workflow) - log.info after_text - - // - // Validate the parameters using nextflow_schema.json or the schema - // given via the validation.parametersSchema configuration option - // - if(validate_params) { - validateOptions = [:] - if(parameters_schema) { - validateOptions << [parametersSchema: parameters_schema] - } - validateParameters(validateOptions) - } - - emit: - dummy_emit = true -} - diff --git a/nf_core/pipeline-template/subworkflows/nf-core/utils_nfschema_plugin/meta.yml b/nf_core/pipeline-template/subworkflows/nf-core/utils_nfschema_plugin/meta.yml deleted file mode 100644 index f7d9f02885..0000000000 --- a/nf_core/pipeline-template/subworkflows/nf-core/utils_nfschema_plugin/meta.yml +++ /dev/null @@ -1,35 +0,0 @@ -# yaml-language-server: $schema=https://raw.githubusercontent.com/nf-core/modules/master/subworkflows/yaml-schema.json -name: "utils_nfschema_plugin" -description: Run nf-schema to validate parameters and create a summary of changed parameters -keywords: - - validation - - JSON schema - - plugin - - parameters - - summary -components: [] -input: - - input_workflow: - type: object - description: | - The workflow object of the used pipeline. - This object contains meta data used to create the params summary log - - validate_params: - type: boolean - description: Validate the parameters and error if invalid. - - parameters_schema: - type: string - description: | - Path to the parameters JSON schema. - This has to be the same as the schema given to the `validation.parametersSchema` config - option. When this input is empty it will automatically use the configured schema or - "${projectDir}/nextflow_schema.json" as default. The schema should not be given in this way - for meta pipelines. 
-output: - - dummy_emit: - type: boolean - description: Dummy emit to make nf-core subworkflows lint happy -authors: - - "@nvnieuwk" -maintainers: - - "@nvnieuwk" diff --git a/nf_core/pipeline-template/subworkflows/nf-core/utils_nfschema_plugin/tests/main.nf.test b/nf_core/pipeline-template/subworkflows/nf-core/utils_nfschema_plugin/tests/main.nf.test deleted file mode 100644 index c977917aac..0000000000 --- a/nf_core/pipeline-template/subworkflows/nf-core/utils_nfschema_plugin/tests/main.nf.test +++ /dev/null @@ -1,173 +0,0 @@ -nextflow_workflow { - - name "Test Subworkflow UTILS_NFSCHEMA_PLUGIN" - script "../main.nf" - workflow "UTILS_NFSCHEMA_PLUGIN" - - tag "subworkflows" - tag "subworkflows_nfcore" - tag "subworkflows/utils_nfschema_plugin" - tag "plugin/nf-schema" - - config "./nextflow.config" - - test("Should run nothing") { - - when { - - params { - test_data = '' - } - - workflow { - """ - validate_params = false - input[0] = workflow - input[1] = validate_params - input[2] = "" - input[3] = false - input[4] = false - input[5] = false - input[6] = "" - input[7] = "" - input[8] = "" - """ - } - } - - then { - assertAll( - { assert workflow.success } - ) - } - } - - test("Should validate params") { - - when { - - params { - test_data = '' - outdir = null - } - - workflow { - """ - validate_params = true - input[0] = workflow - input[1] = validate_params - input[2] = "" - input[3] = false - input[4] = false - input[5] = false - input[6] = "" - input[7] = "" - input[8] = "" - """ - } - } - - then { - assertAll( - { assert workflow.failed }, - { assert workflow.stdout.any { it.contains('ERROR ~ Validation of pipeline parameters failed!') } } - ) - } - } - - test("Should run nothing - custom schema") { - - when { - - params { - test_data = '' - } - - workflow { - """ - validate_params = false - input[0] = workflow - input[1] = validate_params - input[2] = "${projectDir}/subworkflows/nf-core/utils_nfschema_plugin/tests/nextflow_schema.json" - input[3] = false - input[4] = false - input[5] = false - input[6] = "" - input[7] = "" - input[8] = "" - """ - } - } - - then { - assertAll( - { assert workflow.success } - ) - } - } - - test("Should validate params - custom schema") { - - when { - - params { - test_data = '' - outdir = null - } - - workflow { - """ - validate_params = true - input[0] = workflow - input[1] = validate_params - input[2] = "${projectDir}/subworkflows/nf-core/utils_nfschema_plugin/tests/nextflow_schema.json" - input[3] = false - input[4] = false - input[5] = false - input[6] = "" - input[7] = "" - input[8] = "" - """ - } - } - - then { - assertAll( - { assert workflow.failed }, - { assert workflow.stdout.any { it.contains('ERROR ~ Validation of pipeline parameters failed!') } } - ) - } - } - - test("Should create a help message") { - - when { - - params { - test_data = '' - outdir = null - } - - workflow { - """ - validate_params = true - input[0] = workflow - input[1] = validate_params - input[2] = "${projectDir}/subworkflows/nf-core/utils_nfschema_plugin/tests/nextflow_schema.json" - input[3] = true - input[4] = false - input[5] = false - input[6] = "Before" - input[7] = "After" - input[8] = "nextflow run test/test" - """ - } - } - - then { - assertAll( - { assert workflow.success } - ) - } - } -} diff --git a/nf_core/pipeline-template/subworkflows/nf-core/utils_nfschema_plugin/tests/nextflow.config b/nf_core/pipeline-template/subworkflows/nf-core/utils_nfschema_plugin/tests/nextflow.config deleted file mode 100644 index 8d8c73718a..0000000000 --- 
a/nf_core/pipeline-template/subworkflows/nf-core/utils_nfschema_plugin/tests/nextflow.config +++ /dev/null @@ -1,8 +0,0 @@ -plugins { - id "nf-schema@2.5.1" -} - -validation { - parametersSchema = "${projectDir}/subworkflows/nf-core/utils_nfschema_plugin/tests/nextflow_schema.json" - monochromeLogs = true -} diff --git a/nf_core/pipeline-template/subworkflows/nf-core/utils_nfschema_plugin/tests/nextflow_schema.json b/nf_core/pipeline-template/subworkflows/nf-core/utils_nfschema_plugin/tests/nextflow_schema.json deleted file mode 100644 index 331e0d2f44..0000000000 --- a/nf_core/pipeline-template/subworkflows/nf-core/utils_nfschema_plugin/tests/nextflow_schema.json +++ /dev/null @@ -1,96 +0,0 @@ -{ - "$schema": "https://json-schema.org/draft/2020-12/schema", - "$id": "https://raw.githubusercontent.com/./master/nextflow_schema.json", - "title": ". pipeline parameters", - "description": "", - "type": "object", - "$defs": { - "input_output_options": { - "title": "Input/output options", - "type": "object", - "fa_icon": "fas fa-terminal", - "description": "Define where the pipeline should find input data and save output data.", - "required": ["outdir"], - "properties": { - "validate_params": { - "type": "boolean", - "description": "Validate parameters?", - "default": true, - "hidden": true - }, - "outdir": { - "type": "string", - "format": "directory-path", - "description": "The output directory where the results will be saved. You have to use absolute paths to storage on Cloud infrastructure.", - "fa_icon": "fas fa-folder-open" - }, - "test_data_base": { - "type": "string", - "default": "https://raw.githubusercontent.com/nf-core/test-datasets/modules", - "description": "Base for test data directory", - "hidden": true - }, - "test_data": { - "type": "string", - "description": "Fake test data param", - "hidden": true - } - } - }, - "generic_options": { - "title": "Generic options", - "type": "object", - "fa_icon": "fas fa-file-import", - "description": "Less common options for the pipeline, typically set in a config file.", - "help_text": "These options are common to all nf-core pipelines and allow you to customise some of the core preferences for how the pipeline runs.\n\nTypically these options would be set in a Nextflow config file loaded for all pipeline runs, such as `~/.nextflow/config`.", - "properties": { - "help": { - "type": "boolean", - "description": "Display help text.", - "fa_icon": "fas fa-question-circle", - "hidden": true - }, - "version": { - "type": "boolean", - "description": "Display version and exit.", - "fa_icon": "fas fa-question-circle", - "hidden": true - }, - "logo": { - "type": "boolean", - "default": true, - "description": "Display nf-core logo in console output.", - "fa_icon": "fas fa-image", - "hidden": true - }, - "singularity_pull_docker_container": { - "type": "boolean", - "description": "Pull Singularity container from Docker?", - "hidden": true - }, - "publish_dir_mode": { - "type": "string", - "default": "copy", - "description": "Method used to save pipeline results to output directory.", - "help_text": "The Nextflow `publishDir` option specifies which intermediate files should be saved to the output directory. This option tells the pipeline what method should be used to move these files. 
See [Nextflow docs](https://www.nextflow.io/docs/latest/process.html#publishdir) for details.", - "fa_icon": "fas fa-copy", - "enum": ["symlink", "rellink", "link", "copy", "copyNoFollow", "move"], - "hidden": true - }, - "monochrome_logs": { - "type": "boolean", - "description": "Use monochrome_logs", - "hidden": true - } - } - } - }, - "allOf": [ - { - "$ref": "#/$defs/input_output_options" - }, - { - "$ref": "#/$defs/generic_options" - } - ] -} diff --git a/nf_core/pipeline-template/workflows/pipeline.nf b/nf_core/pipeline-template/workflows/pipeline.nf index 6126f9ec69..22d360dfe0 100644 --- a/nf_core/pipeline-template/workflows/pipeline.nf +++ b/nf_core/pipeline-template/workflows/pipeline.nf @@ -12,8 +12,8 @@ include { MULTIQC } from '../modules/nf-core/multiqc/main'{% endi {%- if nf_schema %} include { paramsSummaryMap } from 'plugin/nf-schema'{% endif %} {%- if multiqc %} -include { paramsSummaryMultiqc } from '../subworkflows/nf-core/utils_nfcore_pipeline'{% endif %} -include { softwareVersionsToYAML } from '../subworkflows/nf-core/utils_nfcore_pipeline' +include { paramsSummaryMultiqc } from '../subworkflows/local/utils_nfcore_{{ short_name }}_pipeline'{% endif %} +include { softwareVersionsToYAML } from '../subworkflows/local/utils_nfcore_{{ short_name }}_pipeline' {%- if citations or multiqc %} include { methodsDescriptionText } from '../subworkflows/local/utils_nfcore_{{ short_name }}_pipeline'{% endif %} {%- endif %} diff --git a/tests/subworkflows/test_lint.py b/tests/subworkflows/test_lint.py index 54ad7e7fc7..8d2a58ec6a 100644 --- a/tests/subworkflows/test_lint.py +++ b/tests/subworkflows/test_lint.py @@ -20,10 +20,7 @@ def test_subworkflows_lint(self): assert len(subworkflow_lint.warned) >= 0 def test_subworkflows_lint_empty(self): - """Test linting a pipeline with no subworkflows installed""" - self.subworkflow_remove.remove("utils_nextflow_pipeline", force=True) - self.subworkflow_remove.remove("utils_nfcore_pipeline", force=True) - self.subworkflow_remove.remove("utils_nfschema_plugin", force=True) + """Test linting a pipeline with no nf-core subworkflows installed.""" nf_core.subworkflows.SubworkflowLint(directory=self.pipeline_dir) assert "No subworkflows from https://github.com/nf-core/modules.git installed in pipeline" in self.caplog.text
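
For reference, a minimal sketch of the initialisation logic after this migration. It uses only the nf-core-utils plugin functions and call signatures that appear in the hunks above; the workflow name EXAMPLE_INITIALISATION and its take: inputs are illustrative placeholders rather than part of the template, and the snippet assumes the id 'nf-core-utils@0.3.0' plugin declaration from the nextflow.config hunk is active.

include { getWorkflowVersion   } from 'plugin/nf-core-utils'
include { dumpParametersToJSON } from 'plugin/nf-core-utils'
include { checkCondaChannels   } from 'plugin/nf-core-utils'
include { checkConfigProvided  } from 'plugin/nf-core-utils'
include { checkProfileProvided } from 'plugin/nf-core-utils'

workflow EXAMPLE_INITIALISATION {
    take:
    version           // boolean: value of --version
    outdir            // string: value of --outdir
    nextflow_cli_args // list: positional CLI arguments
    monochrome_logs   // boolean: disable coloured log output

    main:
    // Replaces UTILS_NEXTFLOW_PIPELINE: print the version string and exit early
    if (version) {
        log.info("${workflow.manifest.name} ${getWorkflowVersion(workflow.manifest.version, workflow.commitId)}")
        System.exit(0)
    }

    // Dump the resolved pipeline parameters to JSON under the output directory
    if (outdir) {
        dumpParametersToJSON(outdir, params)
    }

    // Warn about missing or mis-ordered conda channels when a conda/mamba profile is used
    if (workflow.profile.tokenize(',').intersect(['conda', 'mamba']).size() >= 1) {
        checkCondaChannels()
    }

    // Replaces UTILS_NFCORE_PIPELINE: config and -profile sanity checks
    checkConfigProvided()
    checkProfileProvided(nextflow_cli_args, monochrome_logs)
}

Because these functions are now imported directly from the plugin in main.nf and in the local utils subworkflow, the nf-core utility subworkflows are no longer installed, which is why modules.json drops the utils_* entries above.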