From eb919fa96b4400bd6c658cc566fadc471dfd2516 Mon Sep 17 00:00:00 2001 From: Phil Ewels Date: Thu, 25 Mar 2021 19:19:15 +0100 Subject: [PATCH 001/210] Bump to 1.14dev --- CHANGELOG.md | 4 ++++ setup.py | 2 +- 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 06ac98d0bd..21bd6b5e55 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,9 @@ # nf-core/tools: Changelog +## 1.14dev + +_..nothing yet.._ + ## [v1.13.3 - Copper Crocodile Resurrection :crocodile:](https://github.com/nf-core/tools/releases/tag/1.13.2) - [2021-03-24] * Running tests twice with `nf-core modules create-test-yml` to catch unreproducible md5 sums [[#890](https://github.com/nf-core/tools/issues/890)] diff --git a/setup.py b/setup.py index 605e08406b..3d3130f46c 100644 --- a/setup.py +++ b/setup.py @@ -2,7 +2,7 @@ from setuptools import setup, find_packages -version = "1.13.3" +version = "1.14dev" with open("README.md") as f: readme = f.read() From 3a2c8158b856712cb78b285b0bbe5c23125bf984 Mon Sep 17 00:00:00 2001 From: phue Date: Fri, 26 Mar 2021 11:15:34 +0100 Subject: [PATCH 002/210] fix conda and podman profile strictness --- nf_core/pipeline-template/nextflow.config | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/nf_core/pipeline-template/nextflow.config b/nf_core/pipeline-template/nextflow.config index 8f73409af0..9c503fafc6 100644 --- a/nf_core/pipeline-template/nextflow.config +++ b/nf_core/pipeline-template/nextflow.config @@ -65,7 +65,7 @@ profiles { singularity.enabled = false podman.enabled = false shifter.enabled = false - charliecloud = false + charliecloud.enabled = false process.conda = "$projectDir/environment.yml" } debug { process.beforeScript = 'echo $HOSTNAME' } @@ -94,7 +94,7 @@ profiles { docker.enabled = false podman.enabled = true shifter.enabled = false - charliecloud = false + charliecloud.enabled = false } shifter { singularity.enabled = false From 866a60c9a5f544e4b354e02ad2b5d00d15e06419 Mon Sep 17 00:00:00 2001 From: "James A. Fellows Yates" Date: Fri, 26 Mar 2021 13:22:39 +0100 Subject: [PATCH 003/210] Replace max_time regex with @ewels and @kevinmenden new version --- nf_core/pipeline-template/nextflow_schema.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nf_core/pipeline-template/nextflow_schema.json b/nf_core/pipeline-template/nextflow_schema.json index 5cfc02abdc..eb40ed5aa0 100644 --- a/nf_core/pipeline-template/nextflow_schema.json +++ b/nf_core/pipeline-template/nextflow_schema.json @@ -192,7 +192,7 @@ "description": "Maximum amount of time that can be requested for any single job.", "default": "240.h", "fa_icon": "far fa-clock", - "pattern": "^[\\d\\.]+\\.*(s|m|h|d)$", + "pattern": "^(\\d+(\\.\\d+)?(?:\\s*|\\.?)(s|m|h|d)\\s*)+$", "hidden": true, "help_text": "Use to set an upper-limit for the time requirement for each process. Should be a string in the format integer-unit e.g. `--max_time '2.h'`" } From 455b5ef1df931b592c9dcb156cc624f1c9fb7854 Mon Sep 17 00:00:00 2001 From: "James A. 
Fellows Yates" Date: Fri, 26 Mar 2021 13:23:46 +0100 Subject: [PATCH 004/210] Update CHANGELOG.md --- CHANGELOG.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 21bd6b5e55..c51294a8cd 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,7 +2,7 @@ ## 1.14dev -_..nothing yet.._ +* Fix overly strict `--max_time` formatting regex [[#973](https://github.com/nf-core/tools/issues/973)] ## [v1.13.3 - Copper Crocodile Resurrection :crocodile:](https://github.com/nf-core/tools/releases/tag/1.13.2) - [2021-03-24] From bac6c74937d7ff5c56b62c7c95cb654e3adab9db Mon Sep 17 00:00:00 2001 From: phue Date: Fri, 26 Mar 2021 13:40:48 +0100 Subject: [PATCH 005/210] update CHANGELOG.md --- CHANGELOG.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 21bd6b5e55..f374652ac4 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,7 +2,7 @@ ## 1.14dev -_..nothing yet.._ +* Fixed an issue in the pipeline template regarding explicit disabling of unused container engines [[#972](https://github.com/nf-core/tools/pull/972)] ## [v1.13.3 - Copper Crocodile Resurrection :crocodile:](https://github.com/nf-core/tools/releases/tag/1.13.2) - [2021-03-24] From 624b3609217a2aaecd26e6ad664e1fb8325c6028 Mon Sep 17 00:00:00 2001 From: Phil Ewels Date: Fri, 26 Mar 2021 14:09:10 +0100 Subject: [PATCH 006/210] Update CHANGELOG.md --- CHANGELOG.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index c91911ee04..9adef35718 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -3,7 +3,7 @@ ## 1.14dev * Fixed an issue in the pipeline template regarding explicit disabling of unused container engines [[#972](https://github.com/nf-core/tools/pull/972)] -* Fix overly strict `--max_time` formatting regex [[#973](https://github.com/nf-core/tools/issues/973)] +* Fix overly strict `--max_time` formatting regex in template schema [[#973](https://github.com/nf-core/tools/issues/973)] ## [v1.13.3 - Copper Crocodile Resurrection :crocodile:](https://github.com/nf-core/tools/releases/tag/1.13.2) - [2021-03-24] From 6257e1a37f97337fa4fff182f0fccaa1ef72fc6c Mon Sep 17 00:00:00 2001 From: Phil Ewels Date: Fri, 26 Mar 2021 15:44:59 +0100 Subject: [PATCH 007/210] Remove trailing slash from params.igenomes_base --- CHANGELOG.md | 4 ++++ nf_core/pipeline-template/nextflow.config | 2 +- nf_core/pipeline-template/nextflow_schema.json | 2 +- 3 files changed, 6 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 9adef35718..68be251723 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -5,6 +5,10 @@ * Fixed an issue in the pipeline template regarding explicit disabling of unused container engines [[#972](https://github.com/nf-core/tools/pull/972)] * Fix overly strict `--max_time` formatting regex in template schema [[#973](https://github.com/nf-core/tools/issues/973)] +### Template + +* Removed trailing slash from `params.igenomes_base` to yield valid s3 paths (previous paths work with Nextflow but not aws cli) + ## [v1.13.3 - Copper Crocodile Resurrection :crocodile:](https://github.com/nf-core/tools/releases/tag/1.13.2) - [2021-03-24] * Running tests twice with `nf-core modules create-test-yml` to catch unreproducible md5 sums [[#890](https://github.com/nf-core/tools/issues/890)] diff --git a/nf_core/pipeline-template/nextflow.config b/nf_core/pipeline-template/nextflow.config index 9c503fafc6..480d739919 100644 --- a/nf_core/pipeline-template/nextflow.config +++ 
b/nf_core/pipeline-template/nextflow.config @@ -25,7 +25,7 @@ params { plaintext_email = false monochrome_logs = false help = false - igenomes_base = 's3://ngi-igenomes/igenomes/' + igenomes_base = 's3://ngi-igenomes/igenomes' tracedir = "${params.outdir}/pipeline_info" igenomes_ignore = false custom_config_version = 'master' diff --git a/nf_core/pipeline-template/nextflow_schema.json b/nf_core/pipeline-template/nextflow_schema.json index eb40ed5aa0..786de002bb 100644 --- a/nf_core/pipeline-template/nextflow_schema.json +++ b/nf_core/pipeline-template/nextflow_schema.json @@ -62,7 +62,7 @@ "igenomes_base": { "type": "string", "description": "Directory / URL base for iGenomes references.", - "default": "s3://ngi-igenomes/igenomes/", + "default": "s3://ngi-igenomes/igenomes", "fa_icon": "fas fa-cloud-download-alt", "hidden": true }, From cfcc147f57366cbed5c4c3ddcceba505c263d392 Mon Sep 17 00:00:00 2001 From: Phil Ewels Date: Fri, 26 Mar 2021 22:10:42 +0100 Subject: [PATCH 008/210] Launch: Strip unset params with no defaults. * If a param doesn't have a default in the schema and the input is False / None / "", strip it * Sanitise the default values in the schema so that they are the correct type * Don't surround command-line params with quotes if numeric Fixes nf-core/tools#976 --- nf_core/launch.py | 22 ++++++++++++++++++---- nf_core/schema.py | 33 +++++++++++++++++++++++++++++++++ 2 files changed, 51 insertions(+), 4 deletions(-) diff --git a/nf_core/launch.py b/nf_core/launch.py index 42a1ec2014..ce571f373c 100644 --- a/nf_core/launch.py +++ b/nf_core/launch.py @@ -620,10 +620,21 @@ def print_param_header(self, param_id, param_obj, is_group=False): def strip_default_params(self): """ Strip parameters if they have not changed from the default """ - # Schema defaults - for param_id, val in self.schema_obj.schema_defaults.items(): - if self.schema_obj.input_params.get(param_id) == val: - del self.schema_obj.input_params[param_id] + # Go through each supplied parameter (force list so we can delete in the loop) + for param_id in list(self.schema_obj.input_params.keys()): + val = self.schema_obj.input_params[param_id] + + # Params with a schema default + if param_id in self.schema_obj.schema_defaults: + # Strip if param is same as the schema default + if val == self.schema_obj.schema_defaults[param_id]: + del self.schema_obj.input_params[param_id] + + # Params with no schema default + else: + # Strip if param is empty + if val is False or val is None or val == "": + del self.schema_obj.input_params[param_id] # Nextflow flag defaults for param_id, val in self.nxf_flag_schema["coreNextflow"]["properties"].items(): @@ -657,6 +668,9 @@ def build_command(self): # Boolean flags like --saveTrimmed if isinstance(val, bool) and val: self.nextflow_cmd += " --{}".format(param) + # No quotes for numbers + elif (isinstance(val, int) or isinstance(val, float)) and val: + self.nextflow_cmd += " --{} {}".format(param, str(val).replace('"', '\\"')) # everything else else: self.nextflow_cmd += ' --{} "{}"'.format(param, str(val).replace('"', '\\"')) diff --git a/nf_core/schema.py b/nf_core/schema.py index 697e52b2b0..f11c7a0793 100644 --- a/nf_core/schema.py +++ b/nf_core/schema.py @@ -99,6 +99,37 @@ def load_schema(self): self.schema_params = [] log.debug("JSON file loaded: {}".format(self.schema_filename)) + def sanitise_param_default(self, param): + """ + Given a param, ensure that the default value it is the correct variable type + """ + if "type" not in param or "default" not in param: + return param + + # 
Bools + if param["type"] == "boolean": + if not isinstance(param["default"], bool): + param["default"] = param["default"] == "true" + return param + + # For everything else, an empty string is an empty string + if isinstance(param["default"], str) and param["default"].strip() == "": + return param + + # Integers + if param["type"] == "integer": + param["default"] = int(param["default"]) + return param + + # Numbers + if param["type"] == "number": + param["default"] = float(param["default"]) + return param + + # Strings + param["default"] = str(param["default"]) + return param + def get_schema_defaults(self): """ Generate set of default input parameters from schema. @@ -110,6 +141,7 @@ for p_key, param in self.schema.get("properties", {}).items(): self.schema_params.append(p_key) if "default" in param: + param = self.sanitise_param_default(param) self.schema_defaults[p_key] = param["default"] # Grouped schema properties in subschema definitions @@ -117,6 +149,7 @@ for p_key, param in definition.get("properties", {}).items(): self.schema_params.append(p_key) if "default" in param: + param = self.sanitise_param_default(param) self.schema_defaults[p_key] = param["default"] def save_schema(self): From 59c2b71c446e9a5987ab0c97a3e3f64fd6c2f1ee Mon Sep 17 00:00:00 2001 From: Phil Ewels Date: Fri, 26 Mar 2021 22:12:03 +0100 Subject: [PATCH 009/210] Changelog --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 68be251723..3b352c8d3d 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,6 +4,7 @@ * Fixed an issue in the pipeline template regarding explicit disabling of unused container engines [[#972](https://github.com/nf-core/tools/pull/972)] * Fix overly strict `--max_time` formatting regex in template schema [[#973](https://github.com/nf-core/tools/issues/973)] +* Strip values from `nf-core launch` web response which are False and have no default in the schema [[#976](https://github.com/nf-core/tools/issues/976)] ### Template From 37ba793960cdff610052892cec43820ef2edaaf1 Mon Sep 17 00:00:00 2001 From: Phil Ewels Date: Fri, 26 Mar 2021 22:10:42 +0100 Subject: [PATCH 010/210] Sync: Try to fix the GitHub abuse mechanism coping code * Fixes the main bug that the default value for the Retry-After header was an int and not a str * Make the default wait random, between 10 - 60 seconds * Some tweaks, print the headers in more log messages, make them prettier. 
Fixes nf-core/tools#970 --- nf_core/sync.py | 14 ++++++++++---- 1 file changed, 10 insertions(+), 4 deletions(-) diff --git a/nf_core/sync.py b/nf_core/sync.py index ee8893089c..f198f90ca2 100644 --- a/nf_core/sync.py +++ b/nf_core/sync.py @@ -6,6 +6,7 @@ import json import logging import os +import random import re import requests import requests_cache @@ -331,29 +332,34 @@ def make_pull_request(self): try: self.gh_pr_returned_data = json.loads(r.content) returned_data_prettyprint = json.dumps(self.gh_pr_returned_data, indent=4) + r_headers_pp = json.dumps(r.headers, indent=4) except: self.gh_pr_returned_data = r.content returned_data_prettyprint = r.content + r_headers_pp = r.headers # PR worked if r.status_code == 201: self.pr_url = self.gh_pr_returned_data["html_url"] - log.debug(f"GitHub API PR worked:\n{returned_data_prettyprint}") + log.debug(f"GitHub API PR worked:\n{returned_data_prettyprint}\n\n{r_headers_pp}") log.info(f"GitHub PR created: {self.gh_pr_returned_data['html_url']}") break # Returned 403 error - too many simultaneous requests # https://github.com/nf-core/tools/issues/911 if r.status_code == 403: - log.debug(f"GitHub API PR failed with 403 error:\n{returned_data_prettyprint}\n\n{r.headers}") - wait_time = float(re.sub("[^0-9]", "", r.headers.get("Retry-After", 30))) + log.debug(f"GitHub API PR failed with 403 error:\n{returned_data_prettyprint}\n\n{r_headers_pp}") + wait_time = float(re.sub("[^0-9]", "", str(r.headers.get("Retry-After", 0)))) + if wait_time == 0: + log.debug("Couldn't find 'Retry-After' header, guessing a length of time to wait") + wait_time = random.randrange(10, 60) log.warning(f"Got 403 code - probably the abuse protection. Trying again after {wait_time} seconds..") time.sleep(wait_time) # Something went wrong else: raise PullRequestException( - f"GitHub API returned code {r.status_code}: \n\n{returned_data_prettyprint}\n\n{r.headers}" + f"GitHub API returned code {r.status_code}: \n\n{returned_data_prettyprint}\n\n{r_headers_pp}" ) def close_open_template_merge_prs(self): From 0971ef9d970b181af78b7fcfc6257bc17859322f Mon Sep 17 00:00:00 2001 From: Phil Ewels Date: Fri, 26 Mar 2021 22:55:41 +0100 Subject: [PATCH 011/210] Changelog --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 68be251723..262c9a9557 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,6 +4,7 @@ * Fixed an issue in the pipeline template regarding explicit disabling of unused container engines [[#972](https://github.com/nf-core/tools/pull/972)] * Fix overly strict `--max_time` formatting regex in template schema [[#973](https://github.com/nf-core/tools/issues/973)] +* Try to fix the fix for the automated sync when we submit too many PRs at once [[#970](https://github.com/nf-core/tools/issues/970)] ### Template From e8de348992b6d75a2ff2925a1835b2cd2c83cb26 Mon Sep 17 00:00:00 2001 From: Phil Ewels Date: Fri, 26 Mar 2021 23:53:04 +0100 Subject: [PATCH 012/210] Rewrote schema summary output function. 
* Don't set defaults to empty strings if no default in the schema * Fix logic for printing a param to the summary if there is no default * Rewrite the colour for the summary log * Other minor tidying Fixes nf-core/tools#971 --- .../pipeline-template/lib/NfcoreSchema.groovy | 42 +++++++++---------- 1 file changed, 19 insertions(+), 23 deletions(-) diff --git a/nf_core/pipeline-template/lib/NfcoreSchema.groovy b/nf_core/pipeline-template/lib/NfcoreSchema.groovy index 54935ec818..adfb2aec31 100644 --- a/nf_core/pipeline-template/lib/NfcoreSchema.groovy +++ b/nf_core/pipeline-template/lib/NfcoreSchema.groovy @@ -191,11 +191,11 @@ class NfcoreSchema { // Remove an element from a JSONArray private static JSONArray removeElement(jsonArray, element){ - def list = [] + def list = [] int len = jsonArray.length() - for (int i=0;i<len;i++){ Date: Fri, 26 Mar 2021 23:56:03 +0100 Subject: [PATCH 013/210] Changelog --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 68be251723..ee3953f8de 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,6 +4,7 @@ * Fixed an issue in the pipeline template regarding explicit disabling of unused container engines [[#972](https://github.com/nf-core/tools/pull/972)] * Fix overly strict `--max_time` formatting regex in template schema [[#973](https://github.com/nf-core/tools/issues/973)] +* Rewrite the `params_summary_log()` function to properly ignore unset params and have nicer formatting [[#971](https://github.com/nf-core/tools/issues/971)] ### Template From 71bc6c60e249dec9b9ba3869a7038447b1f432e0 Mon Sep 17 00:00:00 2001 From: Phil Ewels Date: Sat, 27 Mar 2021 13:46:01 +0100 Subject: [PATCH 014/210] Fix docs URL and add mention of config file to nf-core lint cli help --- nf_core/__main__.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/nf_core/__main__.py b/nf_core/__main__.py index fe4932759b..254763056d 100755 --- a/nf_core/__main__.py +++ b/nf_core/__main__.py @@ -315,7 +315,10 @@ def lint(pipeline_dir, release, fix, show_passed, fail_ignored, markdown, json): Runs a large number of automated tests to ensure that the supplied pipeline meets the nf-core guidelines. Documentation of all lint tests can be found - on the nf-core website: https://nf-co.re/errors + on the nf-core website: https://nf-co.re/tools-docs/ + + You can ignore tests using a file called .nf-core-lint.yaml (if you have a good reason!). + See the documentation for details. """ # Run the lint tests!
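Taken together, the `nf-core launch` changes in PATCH 008 boil down to two stripping rules applied before the Nextflow command is built: a parameter still equal to its schema default is dropped, and a parameter with no schema default is dropped when its value is unset (`False` / `None` / `""`). A minimal standalone sketch of those rules (illustrative only, not the real `nf_core/launch.py` code; the parameter names in the example are made up):

```python
def strip_default_params(input_params, schema_defaults):
    """Drop params still equal to their schema default, and unset params with no default."""
    stripped = {}
    for param_id, val in input_params.items():
        if param_id in schema_defaults:
            # Has a schema default - keep it only if the user changed it
            if val == schema_defaults[param_id]:
                continue
        elif val is False or val is None or val == "":
            # No schema default and the value is unset - drop it entirely
            continue
        stripped[param_id] = val
    return stripped


defaults = {"max_time": "240.h"}
params = {"max_time": "240.h", "input": "samples.csv", "email": None}
assert strip_default_params(params, defaults) == {"input": "samples.csv"}
```

Note the asymmetry: falsey values are only stripped when the schema has no default for that parameter, so explicitly switching a boolean back to `false` against a `true` default still makes it onto the command line.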
From 4ecdfba8a43c025f877c03fdd0248fc9517571fd Mon Sep 17 00:00:00 2001 From: Phil Ewels Date: Sat, 27 Mar 2021 22:06:16 +0100 Subject: [PATCH 015/210] Add overwrite=true to trace reports etc for test configs --- nf_core/pipeline-template/conf/test.config | 6 ++++++ nf_core/pipeline-template/conf/test_full.config | 6 ++++++ 2 files changed, 12 insertions(+) diff --git a/nf_core/pipeline-template/conf/test.config b/nf_core/pipeline-template/conf/test.config index ae2c3f262a..b383b8ad31 100644 --- a/nf_core/pipeline-template/conf/test.config +++ b/nf_core/pipeline-template/conf/test.config @@ -26,3 +26,9 @@ params { // Ignore `--input` as otherwise the parameter validation will throw an error schema_ignore_params = 'genomes,input_paths,input' } + +// Overwrite trace reports etc - allows '-resume' on AWS +timeline.overwrite = true +report.overwrite = true +trace.overwrite = true +dag.overwrite = true diff --git a/nf_core/pipeline-template/conf/test_full.config b/nf_core/pipeline-template/conf/test_full.config index 83e98e01ff..00d373da31 100644 --- a/nf_core/pipeline-template/conf/test_full.config +++ b/nf_core/pipeline-template/conf/test_full.config @@ -22,3 +22,9 @@ params { // Ignore `--input` as otherwise the parameter validation will throw an error schema_ignore_params = 'genomes,input_paths,input' } + +// Overwrite trace reports etc - allows '-resume' on AWS +timeline.overwrite = true +report.overwrite = true +trace.overwrite = true +dag.overwrite = true From 19f39ee57331f777684dac378e55d6f580e3bc04 Mon Sep 17 00:00:00 2001 From: kevinmenden Date: Mon, 29 Mar 2021 13:15:46 +0200 Subject: [PATCH 016/210] update module commands with new format --- nf_core/module-template/tests/test.yml | 2 +- nf_core/modules/lint.py | 1 + nf_core/modules/test_yml_builder.py | 2 +- 3 files changed, 3 insertions(+), 2 deletions(-) diff --git a/nf_core/module-template/tests/test.yml b/nf_core/module-template/tests/test.yml index 17171d8a89..ba4216b206 100644 --- a/nf_core/module-template/tests/test.yml +++ b/nf_core/module-template/tests/test.yml @@ -5,7 +5,7 @@ tags: - {{ tool }} {%- if subtool %} - - {{ tool_name }} + - {{ tool }}/{{ subtool }} {%- endif %} files: - path: output/{{ tool }}/test.bam diff --git a/nf_core/modules/lint.py b/nf_core/modules/lint.py index c5dc501a5b..551d9720b2 100644 --- a/nf_core/modules/lint.py +++ b/nf_core/modules/lint.py @@ -519,6 +519,7 @@ def lint_module_tests(self): # Lint the test.yml file try: with open(self.test_yml, "r") as fh: + # TODO: verify that the tags are correct test_yml = yaml.safe_load(fh) self.passed.append(("test_yml_exists", "Test `test.yml` exists", self.test_yml)) except FileNotFoundError: diff --git a/nf_core/modules/test_yml_builder.py b/nf_core/modules/test_yml_builder.py index 0abb5cd719..255e1ecf64 100644 --- a/nf_core/modules/test_yml_builder.py +++ b/nf_core/modules/test_yml_builder.py @@ -170,7 +170,7 @@ def build_single_test(self, entry_point): mod_name_parts = self.module_name.split("/") tag_defaults = [] for idx in range(0, len(mod_name_parts)): - tag_defaults.append("_".join(mod_name_parts[: idx + 1])) + tag_defaults.append("/".join(mod_name_parts[: idx + 1])) tag_defaults.append(entry_point.replace("test_", "")) # Remove duplicates tag_defaults = list(set(tag_defaults)) From 09b95df448e83d728c76612da7ae7d85b8338b6b Mon Sep 17 00:00:00 2001 From: kevinmenden Date: Mon, 29 Mar 2021 13:19:51 +0200 Subject: [PATCH 017/210] update changelog --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 
68be251723..56d3b41b51 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,6 +4,7 @@ * Fixed an issue in the pipeline template regarding explicit disabling of unused container engines [[#972](https://github.com/nf-core/tools/pull/972)] * Fix overly strict `--max_time` formatting regex in template schema [[#973](https://github.com/nf-core/tools/issues/973)] +* Update `modules` commands to use new test tag format `tool/subtool` ### Template From 7b3979896f722fd55efd903be1b66094e0b174b7 Mon Sep 17 00:00:00 2001 From: Phil Ewels Date: Mon, 29 Mar 2021 22:30:08 +0200 Subject: [PATCH 018/210] Update nf_core/schema.py Co-authored-by: Kevin Menden --- nf_core/schema.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nf_core/schema.py b/nf_core/schema.py index f11c7a0793..5196bcd8fb 100644 --- a/nf_core/schema.py +++ b/nf_core/schema.py @@ -101,7 +101,7 @@ def load_schema(self): def sanitise_param_default(self, param): """ - Given a param, ensure that the default value it is the correct variable type + Given a param, ensure that the default value is the correct variable type """ if "type" not in param or "default" not in param: return param From 30fe95a06dee9ef92c9729000671a949d206f0a0 Mon Sep 17 00:00:00 2001 From: Phil Ewels Date: Tue, 30 Mar 2021 09:07:16 +0200 Subject: [PATCH 019/210] Update nf_core/pipeline-template/conf/test_full.config MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Patrick Hüther --- nf_core/pipeline-template/conf/test_full.config | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/nf_core/pipeline-template/conf/test_full.config b/nf_core/pipeline-template/conf/test_full.config index 00d373da31..160fbdd8f0 100644 --- a/nf_core/pipeline-template/conf/test_full.config +++ b/nf_core/pipeline-template/conf/test_full.config @@ -24,7 +24,8 @@ params { } // Overwrite trace reports etc - allows '-resume' on AWS -timeline.overwrite = true -report.overwrite = true -trace.overwrite = true -dag.overwrite = true +def timeStamp = new java.util.Date().format( 'yyyy-MM-dd_HH:mm:ss') +timeline.file = "${params.tracedir}/execution_timeline_${timeStamp}.html" +report.file = "${params.tracedir}/execution_report_${timeStamp}.html" +trace.file = "${params.tracedir}/execution_trace_${timeStamp}.txt" +dag.file = "${params.tracedir}/pipeline_dag_${timeStamp}.svg" From 3b340d47e1a894769021a34ddbc53f6a421b1e14 Mon Sep 17 00:00:00 2001 From: Phil Ewels Date: Tue, 30 Mar 2021 09:38:23 +0200 Subject: [PATCH 020/210] Update trace + timeline + report + dag filenames to include timestamp --- nf_core/pipeline-template/conf/test.config | 6 ------ nf_core/pipeline-template/conf/test_full.config | 7 ------- nf_core/pipeline-template/nextflow.config | 9 +++++---- 3 files changed, 5 insertions(+), 17 deletions(-) diff --git a/nf_core/pipeline-template/conf/test.config b/nf_core/pipeline-template/conf/test.config index b383b8ad31..ae2c3f262a 100644 --- a/nf_core/pipeline-template/conf/test.config +++ b/nf_core/pipeline-template/conf/test.config @@ -26,9 +26,3 @@ params { // Ignore `--input` as otherwise the parameter validation will throw an error schema_ignore_params = 'genomes,input_paths,input' } - -// Overwrite trace reports etc - allows '-resume' on AWS -timeline.overwrite = true -report.overwrite = true -trace.overwrite = true -dag.overwrite = true diff --git a/nf_core/pipeline-template/conf/test_full.config b/nf_core/pipeline-template/conf/test_full.config index 160fbdd8f0..83e98e01ff 100644 --- 
a/nf_core/pipeline-template/conf/test_full.config +++ b/nf_core/pipeline-template/conf/test_full.config @@ -22,10 +22,3 @@ params { // Ignore `--input` as otherwise the parameter validation will throw an error schema_ignore_params = 'genomes,input_paths,input' } - -// Overwrite trace reports etc - allows '-resume' on AWS -def timeStamp = new java.util.Date().format( 'yyyy-MM-dd_HH:mm:ss') -timeline.file = "${params.tracedir}/execution_timeline_${timeStamp}.html" -report.file = "${params.tracedir}/execution_report_${timeStamp}.html" -trace.file = "${params.tracedir}/execution_trace_${timeStamp}.txt" -dag.file = "${params.tracedir}/pipeline_dag_${timeStamp}.svg" diff --git a/nf_core/pipeline-template/nextflow.config b/nf_core/pipeline-template/nextflow.config index 480d739919..72949ee138 100644 --- a/nf_core/pipeline-template/nextflow.config +++ b/nf_core/pipeline-template/nextflow.config @@ -129,21 +129,22 @@ env { // Capture exit codes from upstream processes when piping process.shell = ['/bin/bash', '-euo', 'pipefail'] +def trace_timestamp = new java.util.Date().format( 'yyyy-MM-dd_HH-mm-ss') timeline { enabled = true - file = "${params.tracedir}/execution_timeline.html" + file = "${params.tracedir}/execution_timeline_${trace_timestamp}.html" } report { enabled = true - file = "${params.tracedir}/execution_report.html" + file = "${params.tracedir}/execution_report_${trace_timestamp}.html" } trace { enabled = true - file = "${params.tracedir}/execution_trace.txt" + file = "${params.tracedir}/execution_trace_${trace_timestamp}.txt" } dag { enabled = true - file = "${params.tracedir}/pipeline_dag.svg" + file = "${params.tracedir}/pipeline_dag_${trace_timestamp}.svg" } manifest { From 1960a250902db4e2b48c72bbb4cd9fecc9da437d Mon Sep 17 00:00:00 2001 From: Phil Ewels Date: Tue, 30 Mar 2021 09:40:28 +0200 Subject: [PATCH 021/210] Changelog --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 68be251723..9a1beb58ab 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -8,6 +8,7 @@ ### Template * Removed trailing slash from `params.igenomes_base` to yield valid s3 paths (previous paths work with Nextflow but not aws cli) +* Added a timestamp to the trace + timeline + report + dag filenames to fix overwrite issue on AWS ## [v1.13.3 - Copper Crocodile Resurrection :crocodile:](https://github.com/nf-core/tools/releases/tag/1.13.2) - [2021-03-24] From d399c6f89d4141e05332e8326b0bed315c99932c Mon Sep 17 00:00:00 2001 From: kevinmenden Date: Wed, 31 Mar 2021 10:29:45 +0200 Subject: [PATCH 022/210] adding some tests for create-test-yml --- tests/test_modules.py | 20 ++++++++++++++++++++ 1 file changed, 20 insertions(+) diff --git a/tests/test_modules.py b/tests/test_modules.py index b4809dd270..0e91353e93 100644 --- a/tests/test_modules.py +++ b/tests/test_modules.py @@ -110,3 +110,23 @@ def test_modules_create_fail_exists(self): with pytest.raises(UserWarning) as excinfo: module_create.create() assert "Module file exists already" in str(excinfo.value) + + def test_modules_custom_yml_dumper(self): + """ Try to create a yml file with the custom yml dumper """ + out_dir = tempfile.mkdtemp() + yml_output_path = os.path.join(out_dir, "test.yml") + meta_builder = nf_core.modules.ModulesTestYmlBuilder("test/tool", False, "./", False, True) + meta_builder.test_yml_output_path = yml_output_path + meta_builder.tests = [{"testname": "myname"}] + meta_builder.print_test_yml() + assert os.path.isfile(yml_output_path) + + def test_modules_test_file_dict(self): + """ Create a
dict of test files and create md5 sums """ + test_file_dir = tempfile.mkdtemp() + meta_builder = nf_core.modules.ModulesTestYmlBuilder("test/tool", False, "./", False, True) + with open(os.path.join(test_file_dir, "test_file.txt"), "w") as fh: + fh.write("this line is just for testing") + test_files = meta_builder.create_test_file_dict(test_file_dir) + assert len(test_files) == 1 + assert test_files[0]["md5sum"] == "2191e06b28b5ba82378bcc0672d01786" From 3fbe07fe7637dc97fd01c2170e6fcb6e306d4661 Mon Sep 17 00:00:00 2001 From: Phil Ewels Date: Wed, 31 Mar 2021 22:38:43 +0200 Subject: [PATCH 023/210] Fix list indentation for correct GitHub rendering --- nf_core/pipeline-template/.github/PULL_REQUEST_TEMPLATE.md | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/nf_core/pipeline-template/.github/PULL_REQUEST_TEMPLATE.md b/nf_core/pipeline-template/.github/PULL_REQUEST_TEMPLATE.md index 64a0d62673..40c3a20ab1 100644 --- a/nf_core/pipeline-template/.github/PULL_REQUEST_TEMPLATE.md +++ b/nf_core/pipeline-template/.github/PULL_REQUEST_TEMPLATE.md @@ -10,14 +10,15 @@ Remember that PRs should be made against the dev branch, unless you're preparing Learn more about contributing: [CONTRIBUTING.md](https://github.com/{{ name }}/tree/master/.github/CONTRIBUTING.md) --> + ## PR checklist - [ ] This comment contains a description of changes (with reason). - [ ] If you've fixed a bug or added code that should be tested, add tests! - - [ ] If you've added a new tool - add to the software_versions process and a regex to `scrape_software_versions.py` - - [ ] If you've added a new tool - have you followed the pipeline conventions in the [contribution docs](https://github.com/{{ name }}/tree/master/.github/CONTRIBUTING.md) - - [ ] If necessary, also make a PR on the {{ name }} _branch_ on the [nf-core/test-datasets](https://github.com/nf-core/test-datasets) repository. + - [ ] If you've added a new tool - add to the software_versions process and a regex to `scrape_software_versions.py` + - [ ] If you've added a new tool - have you followed the pipeline conventions in the [contribution docs]({{ name }}/tree/master/.github/CONTRIBUTING.md) + - [ ] If necessary, also make a PR on the {{ name }} _branch_ on the [nf-core/test-datasets](https://github.com/nf-core/test-datasets) repository. - [ ] Make sure your code lints (`nf-core lint .`). - [ ] Ensure the test suite passes (`nextflow run . -profile test,docker`). - [ ] Usage Documentation in `docs/usage.md` is updated. 
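The md5 assertions in the tests above rely on the test-YAML builder walking a results directory and checksumming each output file. A rough standalone sketch of that idea (a simplified stand-in for `create_test_file_dict()`, not the actual `nf_core/modules/test_yml_builder.py` implementation; `test_file_dict` is a hypothetical helper name):

```python
import hashlib
import os


def test_file_dict(results_dir):
    """Walk results_dir and return one {path, md5sum} entry per output file."""
    entries = []
    for root, _dirs, files in os.walk(results_dir):
        for name in sorted(files):
            path = os.path.join(root, name)
            # Hash the raw bytes so checksums are platform-independent
            with open(path, "rb") as fh:
                md5sum = hashlib.md5(fh.read()).hexdigest()
            entries.append({"path": os.path.relpath(path, results_dir), "md5sum": md5sum})
    return entries
```

Per the test above, a results file containing just `this line is just for testing` is expected to hash to `2191e06b28b5ba82378bcc0672d01786`.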
From e2094ab66bd0a9750a49941b78fb9f76deadd998 Mon Sep 17 00:00:00 2001 From: Phil Ewels Date: Wed, 31 Mar 2021 22:38:57 +0200 Subject: [PATCH 024/210] Schema build - add exception type to handle --- nf_core/utils.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nf_core/utils.py b/nf_core/utils.py index 12fcfd9cab..5a60e579da 100644 --- a/nf_core/utils.py +++ b/nf_core/utils.py @@ -345,7 +345,7 @@ def poll_nfcore_web_api(api_url, post_data=None): try: web_response = json.loads(response.content) assert "status" in web_response - except (json.decoder.JSONDecodeError, AssertionError) as e: + except (json.decoder.JSONDecodeError, AssertionError, TypeError) as e: log.debug("Response content:\n{}".format(response.content)) raise AssertionError( "nf-core website API results response not recognised: {}\n See verbose log for full response".format( From e700e0bac7f82454335ccc2c6b35e9bf3b19af7e Mon Sep 17 00:00:00 2001 From: kevinmenden Date: Thu, 1 Apr 2021 09:11:31 +0200 Subject: [PATCH 025/210] added more tests for modules create and lint --- tests/test_modules.py | 45 +++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 45 insertions(+) diff --git a/tests/test_modules.py b/tests/test_modules.py index 0e91353e93..c55158998c 100644 --- a/tests/test_modules.py +++ b/tests/test_modules.py @@ -2,6 +2,7 @@ """ Tests covering the modules commands """ +from nf_core.modules import create import nf_core.modules import os @@ -12,6 +13,21 @@ from rich.console import Console +def create_modules_repo_dummy(): + """ Create a dummy copy of the nf-core/modules repo """ + root_dir = tempfile.mkdtemp() + os.mkdir(os.path.join(root_dir, "software")) + os.makedirs(os.path.join(root_dir, "tests", "software")) + os.makedirs(os.path.join(root_dir, "tests", "config")) + with open(os.path.join(root_dir, "tests", "config", "pytest_software.yml"), "w") as fh: + fh.writelines(["test:", "\n - software/test/**", "\n - tests/software/test/**"]) + + module_create = nf_core.modules.ModuleCreate(root_dir, "star/align", "@author", "process_medium", False, False) + module_create.create() + + return root_dir + + class TestModules(unittest.TestCase): """Class for modules tests""" @@ -25,6 +41,9 @@ def setUp(self): self.mods = nf_core.modules.PipelineModules() self.mods.pipeline_dir = self.pipeline_dir + # Set up the nf-core/modules repo dummy + self.nfcore_modules = create_modules_repo_dummy() + def test_modulesrepo_class(self): """ Initialise a modules repo object """ modrepo = nf_core.modules.ModulesRepo() @@ -95,6 +114,14 @@ def test_modules_lint_empty(self): assert len(module_lint.warned) == 0 assert len(module_lint.failed) == 0 + def test_modules_lint_new_modules(self): + """ lint all modules in nf-core/modules repo clone """ + module_lint = nf_core.modules.ModuleLint(dir=self.nfcore_modules) + module_lint.lint(print_results=False, all_modules=True) + assert len(module_lint.passed) == 16 + assert len(module_lint.warned) == 24 + assert len(module_lint.failed) == 0 + def test_modules_create_succeed(self): """ Succeed at creating the FastQC module """ module_create = nf_core.modules.ModuleCreate(self.pipeline_dir, "fastqc", "@author", "process_low", True, True) @@ -130,3 +157,21 @@ def test_modules_test_file_dict(self): test_files = meta_builder.create_test_file_dict(test_file_dir) assert len(test_files) == 1 assert test_files[0]["md5sum"] == "2191e06b28b5ba82378bcc0672d01786" + + def test_modules_create_nfcore_modules(self): + """ Create a module in nf-core/modules clone """ + module_create = 
nf_core.modules.ModuleCreate( + self.nfcore_modules, "fastqc", "@author", "process_low", False, False + ) + module_create.create() + assert os.path.exists(os.path.join(self.nfcore_modules, "software", "fastqc", "main.nf")) + assert os.path.exists(os.path.join(self.nfcore_modules, "tests", "software", "fastqc", "main.nf")) + + def test_modules_create_nfcore_modules_subtool(self): + """ Create a tool/subtool module in a nf-core/modules clone """ + module_create = nf_core.modules.ModuleCreate( + self.nfcore_modules, "star/index", "@author", "process_medium", False, False + ) + module_create.create() + assert os.path.exists(os.path.join(self.nfcore_modules, "software", "star", "index", "main.nf")) + assert os.path.exists(os.path.join(self.nfcore_modules, "tests", "software", "star", "index", "main.nf")) From df62dff3ef5b6a1dda92f0fdf24eed51f9775d66 Mon Sep 17 00:00:00 2001 From: kevinmenden Date: Thu, 1 Apr 2021 09:35:14 +0200 Subject: [PATCH 026/210] added test for md5sums --- nf_core/modules/test_yml_builder.py | 3 +-- tests/test_modules.py | 11 +++++++++++ 2 files changed, 12 insertions(+), 2 deletions(-) diff --git a/nf_core/modules/test_yml_builder.py b/nf_core/modules/test_yml_builder.py index 0abb5cd719..d9d1f910a8 100644 --- a/nf_core/modules/test_yml_builder.py +++ b/nf_core/modules/test_yml_builder.py @@ -211,14 +211,13 @@ def create_test_file_dict(self, results_dir): return test_files - def get_md5_sums(self, entry_point, command): + def get_md5_sums(self, entry_point, command, results_dir=None, results_dir_repeat=None): """ Recursively go through directories and subdirectories and generate tuples of (, ) returns: list of tuples """ - results_dir = None run_this_test = False while results_dir is None: if self.run_tests or run_this_test: diff --git a/tests/test_modules.py b/tests/test_modules.py index c55158998c..6d695ab9d0 100644 --- a/tests/test_modules.py +++ b/tests/test_modules.py @@ -158,6 +158,17 @@ def test_modules_test_file_dict(self): assert len(test_files) == 1 assert test_files[0]["md5sum"] == "2191e06b28b5ba82378bcc0672d01786" + def test_modules_create_test_yml_get_md5(self): + """ Get md5 sums from a dummy output """ + test_file_dir = tempfile.mkdtemp() + meta_builder = nf_core.modules.ModulesTestYmlBuilder("test/tool", False, "./", False, True) + with open(os.path.join(test_file_dir, "test_file.txt"), "w") as fh: + fh.write("this line is just for testing") + test_files = meta_builder.get_md5_sums( + entry_point="dummy", command="dummy", results_dir=test_file_dir, results_dir_repeat=test_file_dir + ) + assert test_files[0]["md5sum"] == "2191e06b28b5ba82378bcc0672d01786" + def test_modules_create_nfcore_modules(self): """ Create a module in nf-core/modules clone """ module_create = nf_core.modules.ModuleCreate( From 379cbe386ca4bf2076bf20ed454b3977310c2b8c Mon Sep 17 00:00:00 2001 From: kevinmenden Date: Thu, 1 Apr 2021 13:38:55 +0200 Subject: [PATCH 027/210] add more tests for test_yml_builder --- tests/test_modules.py | 24 +++++++++++++++++++++++- 1 file changed, 23 insertions(+), 1 deletion(-) diff --git a/tests/test_modules.py b/tests/test_modules.py index 6d695ab9d0..5edd1c79c7 100644 --- a/tests/test_modules.py +++ b/tests/test_modules.py @@ -117,7 +117,7 @@ def test_modules_lint_empty(self): def test_modules_lint_new_modules(self): """ lint all modules in nf-core/modules repo clone """ module_lint = nf_core.modules.ModuleLint(dir=self.nfcore_modules) - module_lint.lint(print_results=False, all_modules=True) + module_lint.lint(print_results=True, all_modules=True) 
assert len(module_lint.passed) == 16 assert len(module_lint.warned) == 24 assert len(module_lint.failed) == 0 @@ -169,6 +169,28 @@ def test_modules_create_test_yml_get_md5(self): ) assert test_files[0]["md5sum"] == "2191e06b28b5ba82378bcc0672d01786" + def test_modules_create_test_yml_entry_points(self): + """ Test extracting test entry points from a main.nf file""" + meta_builder = nf_core.modules.ModulesTestYmlBuilder("star/align", False, "./", False, True) + meta_builder.module_test_main = os.path.join( + self.nfcore_modules, "tests", "software", "star", "align", "main.nf" + ) + meta_builder.scrape_workflow_entry_points() + assert meta_builder.entry_points[0] == "test_star_align" + + def test_modules_create_test_yml_check_inputs(self): + """ Test the check_inputs() function - raise UserWarning because test.yml exists """ + cwd = os.getcwd() + os.chdir(self.nfcore_modules) + meta_builder = nf_core.modules.ModulesTestYmlBuilder("star/align", False, "./", False, True) + meta_builder.module_test_main = os.path.join( + self.nfcore_modules, "tests", "software", "star", "align", "main.nf" + ) + with pytest.raises(UserWarning) as excinfo: + meta_builder.check_inputs() + os.chdir(cwd) + assert "Test YAML file already exists!" in str(excinfo.value) + def test_modules_create_nfcore_modules(self): """ Create a module in nf-core/modules clone """ module_create = nf_core.modules.ModuleCreate( From 9e1065e6b0de03966d8a217d36c05f4ed73fee7b Mon Sep 17 00:00:00 2001 From: kevinmenden Date: Tue, 6 Apr 2021 15:08:18 +0200 Subject: [PATCH 028/210] added conda-name flag to modules create --- nf_core/__main__.py | 7 +++++-- nf_core/modules/create.py | 6 ++++-- tests/test_modules.py | 8 ++++++++ 3 files changed, 17 insertions(+), 4 deletions(-) diff --git a/nf_core/__main__.py b/nf_core/__main__.py index 254763056d..b8f34b2e63 100755 --- a/nf_core/__main__.py +++ b/nf_core/__main__.py @@ -444,7 +444,8 @@ def remove(ctx, pipeline_dir, tool): @click.option("-m", "--meta", is_flag=True, default=False, help="Use Groovy meta map for sample information") @click.option("-n", "--no-meta", is_flag=True, default=False, help="Don't use meta map for sample information") @click.option("-f", "--force", is_flag=True, default=False, help="Overwrite any files if they already exist") -def create_module(ctx, directory, tool, author, label, meta, no_meta, force): +@click.option("-c", "--conda-name", type=str, default=None, help="Name of the conda package to use") +def create_module(ctx, directory, tool, author, label, meta, no_meta, force, conda_name): """ Create a new DSL2 module from the nf-core template. 
@@ -465,7 +466,9 @@ def create_module(ctx, directory, tool, author, label, meta, no_meta, force): # Run function try: - module_create = nf_core.modules.ModuleCreate(directory, tool, author, label, has_meta, force) + module_create = nf_core.modules.ModuleCreate( + directory, tool, author, label, has_meta, force, conda_name=conda_name + ) module_create.create() except UserWarning as e: log.critical(e) diff --git a/nf_core/modules/create.py b/nf_core/modules/create.py index ed7f778bfc..fbc255deb4 100644 --- a/nf_core/modules/create.py +++ b/nf_core/modules/create.py @@ -24,7 +24,9 @@ class ModuleCreate(object): - def __init__(self, directory=".", tool="", author=None, process_label=None, has_meta=None, force=False): + def __init__( + self, directory=".", tool="", author=None, process_label=None, has_meta=None, force=False, conda_name=None + ): self.directory = directory self.tool = tool self.author = author @@ -32,7 +34,7 @@ def __init__(self, directory=".", tool="", author=None, process_label=None, has_ self.has_meta = has_meta self.force_overwrite = force - self.tool_conda_name = None + self.tool_conda_name = conda_name self.subtool = None self.tool_licence = None self.repo_type = None diff --git a/tests/test_modules.py b/tests/test_modules.py index b4809dd270..d1c59b3538 100644 --- a/tests/test_modules.py +++ b/tests/test_modules.py @@ -101,6 +101,14 @@ def test_modules_create_succeed(self): module_create.create() assert os.path.exists(os.path.join(self.pipeline_dir, "modules", "local", "fastqc.nf")) + def test_modules_create_different_conda_name(self): + """ Test creating a new module using a different conda package name """ + module_create = nf_core.modules.ModuleCreate( + self.pipeline_dir, "trimgalore", "@author", "process_low", True, True, conda_name="trim-galore" + ) + module_create.create() + assert os.path.exists(os.path.join(self.pipeline_dir, "modules", "local", "trimgalore.nf")) + def test_modules_create_fail_exists(self): """ Fail at creating the same module twice""" module_create = nf_core.modules.ModuleCreate( From e5d7bf0b1acc55e0b851d7c48c3219439323427f Mon Sep 17 00:00:00 2001 From: kevinmenden Date: Tue, 6 Apr 2021 15:10:14 +0200 Subject: [PATCH 029/210] updated changelog --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index f64d60e348..7c00bd6429 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -6,6 +6,7 @@ * Strip values from `nf-core launch` web response which are False and have no default in the schema [[#976](https://github.com/nf-core/tools/issues/976)] * Try to fix the fix for the automated sync when we submit too many PRs at once [[#970](https://github.com/nf-core/tools/issues/970)] +* Added `--conda-name` flag to `nf-core modules create` command to allow sidestepping questionary [[#988](https://github.com/nf-core/tools/issues/988)] ### Template From 45fedd424e2219a3bd1e0528a616fd118728d0bc Mon Sep 17 00:00:00 2001 From: Erik Danielsson Date: Tue, 6 Apr 2021 17:07:12 +0200 Subject: [PATCH 030/210] Added autocomplete prompt if pipeline name is not specified --- nf_core/__main__.py | 2 +- nf_core/download.py | 18 +++++++++++++++--- 2 files changed, 16 insertions(+), 4 deletions(-) diff --git a/nf_core/__main__.py b/nf_core/__main__.py index 254763056d..27c9473b9c 100755 --- a/nf_core/__main__.py +++ b/nf_core/__main__.py @@ -201,7 +201,7 @@ def launch(pipeline, id, revision, command_only, params_in, params_out, save_all # nf-core download @nf_core_cli.command(help_priority=3) -@click.argument("pipeline", required=True, 
metavar="") +@click.argument("pipeline", required=False, metavar="") @click.option("-r", "--release", type=str, help="Pipeline release") @click.option("-o", "--outdir", type=str, help="Output directory") @click.option( diff --git a/nf_core/download.py b/nf_core/download.py index 3591b424ab..a5e0a88e6e 100644 --- a/nf_core/download.py +++ b/nf_core/download.py @@ -8,6 +8,7 @@ import logging import hashlib import os +import questionary import re import requests import requests_cache @@ -74,7 +75,7 @@ class DownloadWorkflow(object): def __init__( self, - pipeline, + pipeline=None, release=None, outdir=None, compress_type="tar.gz", @@ -108,9 +109,21 @@ def __init__( def download_workflow(self): """Starts a nf-core workflow download.""" + # Fetches remote workflows + wfs = nf_core.list.Workflows() + wfs.get_remote_workflows() + + # Prompts user if pipeline name was not specified + if self.pipeline is None: + self.pipeline = questionary.autocomplete( + "Pipeline name:", + choices=[wf.name for wf in wfs.remote_workflows], + style=nf_core.utils.nfcore_question_style, + ).ask() + # Get workflow details try: - self.fetch_workflow_details(nf_core.list.Workflows()) + self.fetch_workflow_details(wfs) except LookupError: sys.exit(1) @@ -175,7 +188,6 @@ def fetch_workflow_details(self, wfs): Raises: LockupError, if the pipeline can not be found. """ - wfs.get_remote_workflows() # Get workflow download details for wf in wfs.remote_workflows: From 5a2ea37b41843131a53db928b2fef4478f6b05d0 Mon Sep 17 00:00:00 2001 From: Kevin Menden Date: Wed, 7 Apr 2021 07:25:31 +0200 Subject: [PATCH 031/210] Update nf_core/__main__.py Co-authored-by: Harshil Patel --- nf_core/__main__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nf_core/__main__.py b/nf_core/__main__.py index b8f34b2e63..6a8b8a6f96 100755 --- a/nf_core/__main__.py +++ b/nf_core/__main__.py @@ -467,7 +467,7 @@ def create_module(ctx, directory, tool, author, label, meta, no_meta, force, con # Run function try: module_create = nf_core.modules.ModuleCreate( - directory, tool, author, label, has_meta, force, conda_name=conda_name + directory, tool, author, label, has_meta, force, conda_name ) module_create.create() except UserWarning as e: From 2aee90655ce9c199f933ed6ed5c4f7825a59b8da Mon Sep 17 00:00:00 2001 From: kevinmenden Date: Wed, 7 Apr 2021 09:33:17 +0200 Subject: [PATCH 032/210] remove redundant test --- tests/test_modules.py | 8 -------- 1 file changed, 8 deletions(-) diff --git a/tests/test_modules.py b/tests/test_modules.py index d1c59b3538..b4809dd270 100644 --- a/tests/test_modules.py +++ b/tests/test_modules.py @@ -101,14 +101,6 @@ def test_modules_create_succeed(self): module_create.create() assert os.path.exists(os.path.join(self.pipeline_dir, "modules", "local", "fastqc.nf")) - def test_modules_create_different_conda_name(self): - """ Test creating a new module using a different conda package name """ - module_create = nf_core.modules.ModuleCreate( - self.pipeline_dir, "trimgalore", "@author", "process_low", True, True, conda_name="trim-galore" - ) - module_create.create() - assert os.path.exists(os.path.join(self.pipeline_dir, "modules", "local", "trimgalore.nf")) - def test_modules_create_fail_exists(self): """ Fail at creating the same module twice""" module_create = nf_core.modules.ModuleCreate( From c2ff72ec1e388e3bf77852800a8132a2113e1e89 Mon Sep 17 00:00:00 2001 From: kevinmenden Date: Wed, 7 Apr 2021 09:46:50 +0200 Subject: [PATCH 033/210] black --- nf_core/__main__.py | 4 +--- 1 file changed, 1 insertion(+), 3 
deletions(-) diff --git a/nf_core/__main__.py b/nf_core/__main__.py index 6a8b8a6f96..350d512055 100755 --- a/nf_core/__main__.py +++ b/nf_core/__main__.py @@ -466,9 +466,7 @@ def create_module(ctx, directory, tool, author, label, meta, no_meta, force, con # Run function try: - module_create = nf_core.modules.ModuleCreate( - directory, tool, author, label, has_meta, force, conda_name - ) + module_create = nf_core.modules.ModuleCreate(directory, tool, author, label, has_meta, force, conda_name) module_create.create() except UserWarning as e: log.critical(e) From 39b31063b27714110082e8dcabd400b76c8a5c57 Mon Sep 17 00:00:00 2001 From: Erik Danielsson <53212377+ErikDanielsson@users.noreply.github.com> Date: Wed, 7 Apr 2021 12:54:08 +0200 Subject: [PATCH 034/210] Update nf_core/__main__.py Co-authored-by: Phil Ewels --- nf_core/__main__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nf_core/__main__.py b/nf_core/__main__.py index 27c9473b9c..2bb9e70139 100755 --- a/nf_core/__main__.py +++ b/nf_core/__main__.py @@ -201,7 +201,7 @@ def launch(pipeline, id, revision, command_only, params_in, params_out, save_all # nf-core download @nf_core_cli.command(help_priority=3) -@click.argument("pipeline", required=False, metavar="<pipeline name>") +@click.argument("pipeline", metavar="<pipeline name>") @click.option("-r", "--release", type=str, help="Pipeline release") @click.option("-o", "--outdir", type=str, help="Output directory") @click.option( From dbee5f0cdfa0d4e6da30dcc23b5e2ff96769c751 Mon Sep 17 00:00:00 2001 From: kevinmenden Date: Thu, 8 Apr 2021 10:23:54 +0200 Subject: [PATCH 035/210] fix markdownlint --- CHANGELOG.md | 1 - 1 file changed, 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 108e1b5188..8afd510d9e 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,7 +2,6 @@ # nf-core/tools: Changelog ## 1.14dev - ### Tools * Strip values from `nf-core launch` web response which are False and have no default in the schema [[#976](https://github.com/nf-core/tools/issues/976)] * Try to fix the fix for the automated sync when we submit too many PRs at once [[#970](https://github.com/nf-core/tools/issues/970)] From 58ed2bfc4c7fdb2464332e7d97f37e16b9e2a4b7 Mon Sep 17 00:00:00 2001 From: Erik Danielsson Date: Thu, 8 Apr 2021 10:24:08 +0200 Subject: [PATCH 036/210] Added test for 'files_unchanged.py' --- tests/lint/files_unchanged.py | 29 +++++++++++++++++++++++++++++ tests/test_lint.py | 9 +++++++++ 2 files changed, 38 insertions(+) create mode 100644 tests/lint/files_unchanged.py diff --git a/tests/lint/files_unchanged.py b/tests/lint/files_unchanged.py new file mode 100644 index 0000000000..abf974b350 --- /dev/null +++ b/tests/lint/files_unchanged.py @@ -0,0 +1,29 @@ +import pytest +import shutil +import tempfile +import os + +import nf_core.lint + + +def test_files_unchanged_pass(self): + self.lint_obj._load() + results = self.lint_obj.files_unchanged() + assert len(results.get("warned", [])) == 0 + assert len(results.get("failed", [])) == 0 + assert len(results.get("ignored", [])) == 0 + assert not results.get("could_fix", True) + + +def test_files_unchanged_fail(self): + failing_file = os.path.join(".github", "CONTRIBUTING.md") + new_pipeline = self._make_pipeline_copy() + with open(os.path.join(new_pipeline, failing_file), "a") as fh: + fh.write("THIS SHOULD NOT BE HERE") + + lint_obj = nf_core.lint.PipelineLint(new_pipeline) + lint_obj._load() + results = lint_obj.files_unchanged() + assert len(results["failed"]) == 1 + assert failing_file in results["failed"][0] + assert results["could_fix"] diff --git a/tests/test_lint.py b/tests/test_lint.py index 6a2aadea87..ce94802cf3 100644 --- a/tests/test_lint.py +++ b/tests/test_lint.py @@ -202,6 +202,15 @@ 
def test_sphinx_rst_files(self): from lint.merge_markers import test_merge_markers_found + from lint.nextflow_config import ( + test_config_variable_example_pass, + ) + + from lint.files_unchanged import ( + test_files_unchanged_pass, + test_files_unchanged_fail, + ) + # def test_critical_missingfiles_example(self): # """Tests for missing nextflow config and main.nf files""" From 982cc9fc7e6f5a61c39dff2fd4483cd73ebd6ed6 Mon Sep 17 00:00:00 2001 From: Erik Danielsson Date: Thu, 8 Apr 2021 10:25:49 +0200 Subject: [PATCH 037/210] Added test for querying PyPI api --- tests/test_utils.py | 32 ++++++++++++++++++++++++++++++++ 1 file changed, 32 insertions(+) diff --git a/tests/test_utils.py b/tests/test_utils.py index ba983fc9e5..962319e2a9 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -5,7 +5,10 @@ import nf_core.create import nf_core.utils +import mock import os +import pytest +import requests import tempfile import unittest @@ -92,3 +95,32 @@ def test_list_files_no_git(self): pipeline_obj = nf_core.utils.Pipeline(tmpdir) pipeline_obj._list_files() assert tmp_fn in pipeline_obj.files + + def test_pip_package_pass(self): + result = nf_core.utils.pip_package("multiqc=1.10") + assert type(result) == dict + + @mock.patch("requests.get") + def test_pip_package_timeout(self, mock_get): + """Tests the PyPi connection and simulates a request timeout, which should + result in an additional warning in the linting""" + # Define the behaviour of the request get mock + mock_get.side_effect = requests.exceptions.Timeout() + # Now do the test + with pytest.raises(LookupError): + nf_core.utils.pip_package("multiqc=1.10") + + @mock.patch("requests.get") + def test_pip_package_connection_error(self, mock_get): + """Tests the PyPi connection and simulates a connection error, which should + result in an additional warning, as we cannot test if dependent module is latest""" + # Define the behaviour of the request get mock + mock_get.side_effect = requests.exceptions.ConnectionError() + # Now do the test + with pytest.raises(LookupError): + nf_core.utils.pip_package("multiqc=1.10") + + def test_pip_erroneous_package(self): + """ Tests the PyPi API package information query """ + with pytest.raises(ValueError): + nf_core.utils.pip_package("not_a_package=1.0") From 7d23409a182442114733bf0852718d7d56137538 Mon Sep 17 00:00:00 2001 From: kevinmenden Date: Thu, 8 Apr 2021 10:27:43 +0200 Subject: [PATCH 038/210] fix name for pytest_software.yml --- nf_core/modules/create.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nf_core/modules/create.py b/nf_core/modules/create.py index fbc255deb4..5c2253abf6 100644 --- a/nf_core/modules/create.py +++ b/nf_core/modules/create.py @@ -113,7 +113,7 @@ def create(self): self.tool_dir = self.tool if self.subtool: - self.tool_name = f"{self.tool}_{self.subtool}" + self.tool_name = f"{self.tool}/{self.subtool}" self.tool_dir = os.path.join(self.tool, self.subtool) # Check existance of directories early for fast-fail From 9f9312f7b53cd42a297709e17344c968322f60fa Mon Sep 17 00:00:00 2001 From: Erik Danielsson Date: Thu, 8 Apr 2021 11:02:23 +0200 Subject: [PATCH 039/210] Test cases for working pipeline and a few failing examples --- tests/lint/nextflow_config.py | 53 +++++++++++++++++++++++++++++++ tests/test_lint.py | 5 +++- 2 files changed, 57 insertions(+), 1 deletion(-) create mode 100644 tests/lint/nextflow_config.py diff --git a/tests/lint/nextflow_config.py b/tests/lint/nextflow_config.py new file mode 100644 index 0000000000..af41b957c6 --- 
/dev/null +++ b/tests/lint/nextflow_config.py @@ -0,0 +1,53 @@ +import pytest +import unittest +import tempfile +import os +import shutil + +import nf_core.create +import nf_core.lint + + +def test_nextflow_config_example_pass(self): + """Tests that config variable existence test works with good pipeline example""" + self.lint_obj._load_pipeline_config() + result = self.lint_obj.nextflow_config() + assert len(result["failed"]) == 0 + assert len(result["warned"]) == 0 + + +def test_nextflow_config_bad_name_fail(self): + """Tests that config variable existence test fails with bad pipeline name""" + new_pipeline = self._make_pipeline_copy() + lint_obj = nf_core.lint.PipelineLint(new_pipeline) + lint_obj._load_pipeline_config() + + lint_obj.nf_config["manifest.name"] = "bad_name" + lint_obj.nf_config["process.container"] = "bad_name:dev" + result = lint_obj.nextflow_config() + assert len(result["failed"]) == 1 + assert len(result["warned"]) == 0 + +def test_nextflow_config_bad_container_name_failed(self): + """Tests that config variable existence test fails with bad container name""" + new_pipeline = self._make_pipeline_copy() + lint_obj = nf_core.lint.PipelineLint(new_pipeline) + lint_obj._load_pipeline_config() + + lint_obj.nf_config["process.container"] = "bad_name" + result = lint_obj.nextflow_config() + assert len(result["failed"]) == 0 + assert len(result["warned"]) == 1 + +def test_nextflow_config_dev_in_release_mode_failed(self): + """Tests that config variable existence test fails with dev version in release mode""" + new_pipeline = self._make_pipeline_copy() + lint_obj = nf_core.lint.PipelineLint(new_pipeline) + lint_obj._load_pipeline_config() + + lint_obj.release_mode = True + lint_obj.nf_config["manifest.version"] = "dev_is_bad_name" + result = lint_obj.nextflow_config() + assert len(result["failed"]) == 1 + assert len(result["warned"]) == 0 + diff --git a/tests/test_lint.py b/tests/test_lint.py index ce94802cf3..633a294d87 100644 --- a/tests/test_lint.py +++ b/tests/test_lint.py @@ -203,7 +203,10 @@ def test_sphinx_rst_files(self): from lint.merge_markers import test_merge_markers_found from lint.nextflow_config import ( - test_config_variable_example_pass, + test_nextflow_config_example_pass, + test_nextflow_config_bad_name_fail, + test_nextflow_config_bad_container_name_failed, + test_nextflow_config_dev_in_release_mode_failed, ) from lint.files_unchanged import ( From 81304172ca1ebe2ede2c91587bc4dfbdde9adbea Mon Sep 17 00:00:00 2001 From: Erik Danielsson Date: Thu, 8 Apr 2021 11:23:14 +0200 Subject: [PATCH 040/210] black --- tests/lint/nextflow_config.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/tests/lint/nextflow_config.py b/tests/lint/nextflow_config.py index af41b957c6..81fcdc81fa 100644 --- a/tests/lint/nextflow_config.py +++ b/tests/lint/nextflow_config.py @@ -28,26 +28,27 @@ def test_nextflow_config_bad_name_fail(self): assert len(result["failed"]) == 1 assert len(result["warned"]) == 0 + def test_nextflow_config_bad_container_name_failed(self): """Tests that config variable existence test fails with bad container name""" new_pipeline = self._make_pipeline_copy() lint_obj = nf_core.lint.PipelineLint(new_pipeline) lint_obj._load_pipeline_config() - + lint_obj.nf_config["process.container"] = "bad_name" result = lint_obj.nextflow_config() assert len(result["failed"]) == 0 assert len(result["warned"]) == 1 + def test_nextflow_config_dev_in_release_mode_failed(self): """Tests that config variable existence test fails with dev version in 
release mode""" new_pipeline = self._make_pipeline_copy() lint_obj = nf_core.lint.PipelineLint(new_pipeline) lint_obj._load_pipeline_config() - + lint_obj.release_mode = True lint_obj.nf_config["manifest.version"] = "dev_is_bad_name" result = lint_obj.nextflow_config() assert len(result["failed"]) == 1 assert len(result["warned"]) == 0 - From 0a29cb57d398b7f8d80ec9b860fdc7717cb99200 Mon Sep 17 00:00:00 2001 From: Phil Ewels Date: Thu, 8 Apr 2021 12:37:42 +0200 Subject: [PATCH 041/210] Modules lint - exit code 1 if we have failures --- nf_core/__main__.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/nf_core/__main__.py b/nf_core/__main__.py index 54d0613da3..9ce7cfcfdd 100755 --- a/nf_core/__main__.py +++ b/nf_core/__main__.py @@ -515,6 +515,8 @@ def lint(ctx, pipeline_dir, tool, all, local, passed): try: module_lint = nf_core.modules.ModuleLint(dir=pipeline_dir) module_lint.lint(module=tool, all_modules=all, print_results=True, local=local, show_passed=passed) + if len(module_lint.failed) > 0: + sys.exit(1) except nf_core.modules.lint.ModuleLintException as e: log.error(e) sys.exit(1) From 7b7d88231f8bf33f283b6cba9d710a40ab9527ab Mon Sep 17 00:00:00 2001 From: Erik Danielsson Date: Thu, 8 Apr 2021 15:10:22 +0200 Subject: [PATCH 042/210] Added Dockerfile for nf-core/tools --- Dockerfile => base.Dockerfile | 0 requirements.txt | 13 +++++++++++++ setup.py | 19 ++++--------------- tools.Dockerfile | 24 ++++++++++++++++++++++++ 4 files changed, 41 insertions(+), 15 deletions(-) rename Dockerfile => base.Dockerfile (100%) create mode 100644 requirements.txt create mode 100644 tools.Dockerfile diff --git a/Dockerfile b/base.Dockerfile similarity index 100% rename from Dockerfile rename to base.Dockerfile diff --git a/requirements.txt b/requirements.txt new file mode 100644 index 0000000000..a8afe909ce --- /dev/null +++ b/requirements.txt @@ -0,0 +1,13 @@ +click +GitPython +jinja2 +jsonschema +packaging +prompt_toolkit>=3.0.3 +pyyaml +pytest-workflow +questionary>=1.8.0 +requests_cache +requests +rich>=9.8.2 +tabulate \ No newline at end of file diff --git a/setup.py b/setup.py index 605e08406b..1916c38b43 100644 --- a/setup.py +++ b/setup.py @@ -7,6 +7,9 @@ with open("README.md") as f: readme = f.read() +with open('requirements.txt') as f: + required = f.read().splitlines() + setup( name="nf-core", version=version, @@ -29,21 +32,7 @@ url="https://github.com/nf-core/tools", license="MIT", entry_points={"console_scripts": ["nf-core=nf_core.__main__:run_nf_core"]}, - install_requires=[ - "click", - "GitPython", - "jinja2", - "jsonschema", - "packaging", - "prompt_toolkit>=3.0.3", - "pyyaml", - "pytest-workflow", - "questionary>=1.8.0", - "requests_cache", - "requests", - "rich>=9.8.2", - "tabulate", - ], + install_requires=required, setup_requires=["twine>=1.11.0", "setuptools>=38.6."], packages=find_packages(exclude=("docs")), include_package_data=True, diff --git a/tools.Dockerfile b/tools.Dockerfile new file mode 100644 index 0000000000..4fae9fe948 --- /dev/null +++ b/tools.Dockerfile @@ -0,0 +1,24 @@ +FROM nextflow/nextflow:21.03.0-edge +LABEL authors="phil.ewels@scilifelab.se,erik.danielsson@scilifelab.se" \ + description="Docker image containing requirements for the nfcore tools" + +# Install python/pip +ENV PYTHONUNBUFFERED=1 +RUN apk add --update --no-cache python3 && ln -sf python3 /usr/bin/python +RUN python3 -m ensurepip +RUN pip3 install --no-cache --upgrade pip setuptools + +# Install dependencies +COPY requirements.txt requirements.txt +RUN pip3 install -r requirements.txt + +# 
Add the nf-core source files to the image +COPY . /usr/src/nf_core +WORKDIR /usr/src/nf_core + +# Install nf-core +RUN pip3 install . + +# Set up entrypoint and cmd for easy docker usage +ENTRYPOINT [ "nf-core" ] +CMD [ "." ] \ No newline at end of file From e89164a8e10c845b32cbb05a24ede77fdf141a25 Mon Sep 17 00:00:00 2001 From: Erik Danielsson Date: Thu, 8 Apr 2021 15:25:24 +0200 Subject: [PATCH 043/210] black --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index 1916c38b43..978a8e91a3 100644 --- a/setup.py +++ b/setup.py @@ -7,7 +7,7 @@ with open("README.md") as f: readme = f.read() -with open('requirements.txt') as f: +with open("requirements.txt") as f: required = f.read().splitlines() setup( From 7838508029b9a5d1babb84a607888e4de2d0e280 Mon Sep 17 00:00:00 2001 From: Erik Danielsson Date: Fri, 9 Apr 2021 09:31:52 +0200 Subject: [PATCH 044/210] Rewrote tools Dockerfile with Python base image --- tools.Dockerfile | 30 ++++++++++++++++++++++-------- 1 file changed, 22 insertions(+), 8 deletions(-) diff --git a/tools.Dockerfile b/tools.Dockerfile index 4fae9fe948..74c82a6a01 100644 --- a/tools.Dockerfile +++ b/tools.Dockerfile @@ -1,23 +1,37 @@ -FROM nextflow/nextflow:21.03.0-edge +FROM python:3.8.9-slim LABEL authors="phil.ewels@scilifelab.se,erik.danielsson@scilifelab.se" \ description="Docker image containing requirements for the nfcore tools" -# Install python/pip -ENV PYTHONUNBUFFERED=1 -RUN apk add --update --no-cache python3 && ln -sf python3 /usr/bin/python -RUN python3 -m ensurepip -RUN pip3 install --no-cache --upgrade pip setuptools +# Update pip to latest version +RUN python -m pip install --upgrade pip # Install dependencies COPY requirements.txt requirements.txt -RUN pip3 install -r requirements.txt +RUN python -m pip install -r requirements.txt + +# Install Nextflow dependencies +RUN apt-get update \ + && apt-get upgrade -y \ + && apt-get install -y git \ + && apt-get install -y curl + +# Create man dir required for Java installation +# and install Java +RUN mkdir -p /usr/share/man/man1 \ + && apt-get install -y openjdk-11-jre \ + && apt-get clean -y && rm -rf /var/lib/apt/lists/* + +# Install Nextflow +RUN curl -s https://get.nextflow.io | bash \ + && mv nextflow /usr/local/bin + # Add the nf-core source files to the image COPY . /usr/src/nf_core WORKDIR /usr/src/nf_core # Install nf-core -RUN pip3 install . +RUN python -m pip install . 
# Set up entrypoint and cmd for easy docker usage ENTRYPOINT [ "nf-core" ] From 9b97ffe996b1802e72a49cf9d4e2d55b46bf75ae Mon Sep 17 00:00:00 2001 From: Erik Danielsson Date: Fri, 9 Apr 2021 10:38:56 +0200 Subject: [PATCH 045/210] Dockerhub push gh actions for tools --- .github/workflows/push_dockerhub_dev.yml | 28 +++++++++++++++++++ .github/workflows/push_dockerhub_release.yml | 29 ++++++++++++++++++++ 2 files changed, 57 insertions(+) create mode 100644 .github/workflows/push_dockerhub_dev.yml create mode 100644 .github/workflows/push_dockerhub_release.yml diff --git a/.github/workflows/push_dockerhub_dev.yml b/.github/workflows/push_dockerhub_dev.yml new file mode 100644 index 0000000000..a676fdee3e --- /dev/null +++ b/.github/workflows/push_dockerhub_dev.yml @@ -0,0 +1,28 @@ +name: nf-core Docker push (dev) +# This builds the docker image and pushes it to DockerHub +# Runs on nf-core repo releases and push event to 'dev' branch (PR merges) +on: + push: + branches: + - dev + +jobs: + push_dockerhub: + name: Push new Docker image to Docker Hub (dev) + runs-on: ubuntu-latest + # Only run for the nf-core repo, for releases and merged PRs + if: ${{ github.repository == 'nf-core/tools'}} + env: + DOCKERHUB_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }} + DOCKERHUB_PASS: ${{ secrets.DOCKERHUB_PASS }} + steps: + - name: Check out tools code + uses: actions/checkout@v2 + + - name: Build new docker image + run: docker build --no-cache . -t nf-core/tools:dev -f tools.Dockerfile + + - name: Push Docker image to DockerHub (dev) + run: | + echo "$DOCKERHUB_PASS" | docker login -u "$DOCKERHUB_USERNAME" --password-stdin + docker push nf-core/tools:dev diff --git a/.github/workflows/push_dockerhub_release.yml b/.github/workflows/push_dockerhub_release.yml new file mode 100644 index 0000000000..df622cd75d --- /dev/null +++ b/.github/workflows/push_dockerhub_release.yml @@ -0,0 +1,29 @@ +name: nf-core Docker push (release) +# This builds the docker image and pushes it to DockerHub +# Runs on nf-core repo releases and push event to 'dev' branch (PR merges) +on: + release: + types: [published] + +jobs: + push_dockerhub: + name: Push new Docker image to Docker Hub (release) + runs-on: ubuntu-latest + # Only run for the nf-core repo, for releases and merged PRs + if: ${{ github.repository == 'nf-core/tools' }} + env: + DOCKERHUB_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }} + DOCKERHUB_PASS: ${{ secrets.DOCKERHUB_PASS }} + steps: + - name: Check out tools code + uses: actions/checkout@v2 + + - name: Build new docker image + run: docker build --no-cache . 
-t nf-core/tools:latest -f tools.Dockerfile + + - name: Push Docker image to DockerHub (release) + run: | + echo "$DOCKERHUB_PASS" | docker login -u "$DOCKERHUB_USERNAME" --password-stdin + docker push nf-core/tools:latest + docker tag nf-core/tools:latest nf-core/tools:${{ github.event.release.tag_name }} + docker push nf-core/tools:${{ github.event.release.tag_name }} From 02c3f22e5d581132bf5bbda3bdf17ba5bb08aca9 Mon Sep 17 00:00:00 2001 From: kevinmenden Date: Fri, 9 Apr 2021 11:36:16 +0200 Subject: [PATCH 046/210] fixed template for "modules create" --- nf_core/module-template/tests/test.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nf_core/module-template/tests/test.yml b/nf_core/module-template/tests/test.yml index 17171d8a89..182a6c5bc7 100644 --- a/nf_core/module-template/tests/test.yml +++ b/nf_core/module-template/tests/test.yml @@ -1,5 +1,5 @@ ## TODO nf-core: Please run the following command to build this file: -# nf-core modules create-test-yml {{ tool }}/{{ subtool }} +# nf-core modules create-test-yml {{ tool }}{%- if subtool %}/{{ subtool }}{%- endif %} - name: {{ tool }}{{ ' '+subtool if subtool else '' }} command: nextflow run ./tests/software/{{ tool_dir }} -entry test_{{ tool_name }} -c tests/config/nextflow.config tags: From 91e2074c6b76c8eed5e18c333c621cca8837d1a9 Mon Sep 17 00:00:00 2001 From: Erik Danielsson Date: Fri, 9 Apr 2021 14:31:47 +0200 Subject: [PATCH 047/210] Added checks for custom configs --- nf_core/lint/nextflow_config.py | 41 +++++++++++++++++++++++++++++++++ 1 file changed, 41 insertions(+) diff --git a/nf_core/lint/nextflow_config.py b/nf_core/lint/nextflow_config.py index 19f10f045c..2ad36b6056 100644 --- a/nf_core/lint/nextflow_config.py +++ b/nf_core/lint/nextflow_config.py @@ -1,6 +1,9 @@ #!/usr/bin/env python import re +import os +import logging +log = logging.getLogger(__name__) def nextflow_config(self): @@ -262,6 +265,44 @@ def nextflow_config(self): ) ) + # Check if custom profile params are set correctly + if self.nf_config.get("params.custom_config_version", "").strip("'") == "master": + passed.append("Config ``params.custom_config_version`` is set to ``master``") + else: + failed.append("Config ``params.custom_config_version`` is not set to ``master``") + + custom_config_base = "https://raw.githubusercontent.com/nf-core/configs/" + self.nf_config.get( + "params.custom_config_version", "" + ).strip("'") + if self.nf_config.get("params.custom_config_base", "").strip("'") == custom_config_base: + passed.append("Config ``params.custom_config_base`` is set to ``{}``".format(custom_config_base)) + else: + failed.append("Config ``params.custom_config_base`` is not set to ``{}`` {}".format(custom_config_base)) + + # Check that lines for loading custom profiles exist + lines = [ + r'// Load nf-core custom profiles from different Institutions', + r'try {', + r'includeConfig "${params.custom_config_base}/nfcore_custom.config"', + r"} catch (Exception e) {", + r'System.err.println("WARNING: Could not load nf-core/config profiles: ${params.custom_config_base}/nfcore_custom.config")', + r"}", + ] + path = os.path.join(self.wf_path, "nextflow.config") + i = 0 + with open(path, "r") as f: + for line in f: + if lines[i] in line: + i += 1 + if i == len(lines): + break + else: + i = 0 + if i == len(lines): + passed.append("Lines for loading custom profiles found") + else: + failed.append("Unable to find lines for loading custom profiles") + for config in ignore_configs: ignored.append("Config ignored: 
{}".format(self._wrap_quotes(config))) return {"passed": passed, "warned": warned, "failed": failed, "ignored": ignored} From 64ed98d4a1c11d08155a62acfe09524037080779 Mon Sep 17 00:00:00 2001 From: kevinmenden Date: Fri, 9 Apr 2021 15:07:52 +0200 Subject: [PATCH 048/210] add new functions.nf file to modules template --- nf_core/module-template/software/functions.nf | 36 ++++++++++++------- 1 file changed, 23 insertions(+), 13 deletions(-) diff --git a/nf_core/module-template/software/functions.nf b/nf_core/module-template/software/functions.nf index f177f0c874..078fda9d21 100644 --- a/nf_core/module-template/software/functions.nf +++ b/nf_core/module-template/software/functions.nf @@ -16,13 +16,13 @@ def getSoftwareName(task_process) { */ def initOptions(Map args) { def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_id = args.publish_by_id ?: false - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' + options.args = args.args ?: '' + options.args2 = args.args2 ?: '' + options.args3 = args.args3 ?: '' + options.publish_by_meta = args.publish_by_meta ?: [] + options.publish_dir = args.publish_dir ?: '' + options.publish_files = args.publish_files + options.suffix = args.suffix ?: '' return options } @@ -30,8 +30,8 @@ def initOptions(Map args) { * Tidy up and join elements of a list to return a path string */ def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes + def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries + paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes return paths.join('/') } @@ -40,10 +40,20 @@ def getPathFromList(path_list) { */ def saveFiles(Map args) { if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) + def ioptions = initOptions(args.options) def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_id) { - path_list.add(args.publish_id) + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] + } + path = path instanceof String ? 
path : ''
+ path_list.add(path)
+ }
+ }
}
if (ioptions.publish_files instanceof Map) {
for (ext in ioptions.publish_files) {
@@ -57,4 +67,4 @@
return "${getPathFromList(path_list)}/$args.filename"
}
}
-}
+} \ No newline at end of file

From 7b3e8b98226cfbebdbdc81efd124343ed87bfb7b Mon Sep 17 00:00:00 2001
From: kevinmenden Date: Fri, 9 Apr 2021 15:46:07 +0200
Subject: [PATCH 049/210] added test for pytest_software.yml entry

--- nf_core/modules/lint.py | 26 ++++++++++++++++++++++++++
1 file changed, 26 insertions(+)

diff --git a/nf_core/modules/lint.py b/nf_core/modules/lint.py
index c5dc501a5b..46e2db4912 100644
--- a/nf_core/modules/lint.py
+++ b/nf_core/modules/lint.py
@@ -516,10 +516,36 @@ def lint_module_tests(self):
else:
self.failed.append(("test_main_exists", "test `main.nf` does not exist", self.test_main_nf))

+ # Check that entry in pytest_software.yml exists
+ try:
+ pytest_yml_path = os.path.join(self.base_dir, "tests", "config", "pytest_software.yml")
+ with open(pytest_yml_path, "r") as fh:
+ pytest_yml = yaml.safe_load(fh)
+ if self.module_name in pytest_yml.keys():
+ self.passed.append(("test_pytest_yml", "correct entry in pytest_software.yml", pytest_yml_path))
+ else:
+ self.failed.append(("test_pytest_yml", "missing entry in pytest_software.yml", pytest_yml_path))
+ except FileNotFoundError as e:
+ log.error(f"Could not open pytest_software.yml file: {e}")
+ sys.exit(1)
+
# Lint the test.yml file
try:
with open(self.test_yml, "r") as fh:
test_yml = yaml.safe_load(fh)
+
+ # Verify that tags are correct
+ all_tags_correct = True
+ for test in test_yml:
+ for tag in test["tags"]:
+ if not tag in [self.module_name, self.module_name.split("/")[0]]:
+ all_tags_correct = False
+
+ if all_tags_correct:
+ self.passed.append(("test_yml_tags", "tags adhere to guidelines", self.test_yml))
+ else:
+ self.failed.append(("test_yml_tags", "tags do not adhere to guidelines", self.test_yml))
+
self.passed.append(("test_yml_exists", "Test `test.yml` exists", self.test_yml))
except FileNotFoundError:
self.failed.append(("test_yml_exists", "Test `test.yml` does not exist", self.test_yml))

From e580b003a65347501f9c2df3d96eb97a21e86314 Mon Sep 17 00:00:00 2001
From: kevinmenden Date: Fri, 9 Apr 2021 15:49:40 +0200
Subject: [PATCH 050/210] changelog

--- CHANGELOG.md | 1 +
1 file changed, 1 insertion(+)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 7c00bd6429..09bb71af75 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -7,6 +7,7 @@
* Strip values from `nf-core launch` web response which are False and have no default in the schema [[#976](https://github.com/nf-core/tools/issues/976)]
* Try to fix the fix for the automated sync when we submit too many PRs at once [[#970](https://github.com/nf-core/tools/issues/970)]
* Added `--conda-name` flag to `nf-core modules create` command to allow sidestepping questionary [[#988](https://github.com/nf-core/tools/issues/988)]
+* Extended `nf-core modules lint` functionality to check `test.yml` tags and look for an entry in the `pytest_software.yml` file

### Template

From 4eaedc8428aaa72e3c3d5eb082c45511e7d2bab6 Mon Sep 17 00:00:00 2001
From: kevinmenden Date: Fri, 9 Apr 2021 16:33:25 +0200
Subject: [PATCH 051/210] add template adjustment from @drpatel

--- nf_core/module-template/software/main.nf | 3 +--
1 file changed, 1 insertion(+), 2 deletions(-)

diff --git a/nf_core/module-template/software/main.nf b/nf_core/module-template/software/main.nf
index 46fff0f97b..ab6d713dec 100644
--- a/nf_core/module-template/software/main.nf
+++
b/nf_core/module-template/software/main.nf @@ -25,8 +25,7 @@ process {{ tool_name|upper }} { label '{{ process_label }}' publishDir "${params.outdir}", mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), publish_id:{{ 'meta.id' if has_meta else "''" }}) } - + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:{{ 'meta' if has_meta else "[:]" }}, publish_by_meta:{{ ['id'] if has_meta else "[]" }}) } // TODO nf-core: List required Conda package(s). // Software MUST be pinned to channel (i.e. "bioconda"), version (i.e. "1.10"). // For Conda, the build (i.e. "h9402c20_2") must be EXCLUDED to support installation on different operating systems. From 302d765bb0cc89cde78133070a2ffa45f8746a65 Mon Sep 17 00:00:00 2001 From: Kevin Menden Date: Fri, 9 Apr 2021 16:38:37 +0200 Subject: [PATCH 052/210] Update nf_core/module-template/software/main.nf Co-authored-by: Harshil Patel --- nf_core/module-template/software/main.nf | 1 + 1 file changed, 1 insertion(+) diff --git a/nf_core/module-template/software/main.nf b/nf_core/module-template/software/main.nf index ab6d713dec..66d1d80e38 100644 --- a/nf_core/module-template/software/main.nf +++ b/nf_core/module-template/software/main.nf @@ -26,6 +26,7 @@ process {{ tool_name|upper }} { publishDir "${params.outdir}", mode: params.publish_dir_mode, saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:{{ 'meta' if has_meta else "[:]" }}, publish_by_meta:{{ ['id'] if has_meta else "[]" }}) } + // TODO nf-core: List required Conda package(s). // Software MUST be pinned to channel (i.e. "bioconda"), version (i.e. "1.10"). // For Conda, the build (i.e. "h9402c20_2") must be EXCLUDED to support installation on different operating systems. From ead3b4c4f207bad1eb5a870eb9da8db7d813f84c Mon Sep 17 00:00:00 2001 From: drpatelh Date: Fri, 9 Apr 2021 16:03:44 +0100 Subject: [PATCH 053/210] Fix modules linting for publish_by_meta --- nf_core/modules/lint.py | 13 +++++++++---- 1 file changed, 9 insertions(+), 4 deletions(-) diff --git a/nf_core/modules/lint.py b/nf_core/modules/lint.py index 10cb107716..91200767cb 100644 --- a/nf_core/modules/lint.py +++ b/nf_core/modules/lint.py @@ -673,12 +673,17 @@ def lint_main_nf(self): else: self.failed.append(("main_nf_meta_output", "'meta' map not emitted in output channel(s)", self.main_nf)) - # if meta is specified, it should also be used as 'saveAs ... publishId:meta.id' + # if meta is specified, it should also be used as "saveAs ... 
meta:meta, publish_by_meta:['id']"
save_as = [pl for pl in process_lines if "saveAs" in pl]
- if len(save_as) > 0 and re.search("\s*publish_id\s*:\s*meta.id", save_as[0]):
- self.passed.append(("main_nf_meta_saveas", "'meta.id' specified in saveAs function", self.main_nf))
+ if len(save_as) > 0 and re.search("\s*meta\s*:\s*meta", save_as[0]):
+ self.passed.append(("main_nf_meta_saveas", "'meta:meta' specified in saveAs function", self.main_nf))
else:
- self.failed.append(("main_nf_meta_saveas", "'meta.id' unspecificed in saveAs function", self.main_nf))
+ self.failed.append(("main_nf_meta_saveas", "'meta:meta' unspecified in saveAs function", self.main_nf))
+
+ if len(save_as) > 0 and re.search("\s*publish_by_meta\s*:\s*['id']", save_as[0]):
+ self.passed.append(("main_nf_publish_meta_saveas", '"publish_by_meta:['id']" specified in saveAs function', self.main_nf))
+ else:
+ self.failed.append(("main_nf_publish_meta_saveas", '"publish_by_meta:['id']" unspecified in saveAs function', self.main_nf))

# Check that a software version is emitted
if "version" in outputs:

From f23143f245d7b7290da4fb9260634acbdc2cb986 Mon Sep 17 00:00:00 2001
From: drpatelh Date: Fri, 9 Apr 2021 16:06:42 +0100
Subject: [PATCH 054/210] Update CHANGELOG

--- CHANGELOG.md | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index f8ee198082..01325e390e 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -7,7 +7,7 @@
* Strip values from `nf-core launch` web response which are False and have no default in the schema [[#976](https://github.com/nf-core/tools/issues/976)]
* Try to fix the fix for the automated sync when we submit too many PRs at once [[#970](https://github.com/nf-core/tools/issues/970)]
* Added `--conda-name` flag to `nf-core modules create` command to allow sidestepping questionary [[#988](https://github.com/nf-core/tools/issues/988)]
-* Extended `nf-core modules lint` functionality to check `test.yml` tags and look for an entry in the `pytest_software.yml` file
+* Extended `nf-core modules lint` functionality to check tags in `test.yml` and to look for an entry in the `pytest_software.yml` file
* Update `modules` commands to use new test tag format `tool/subtool`

### Template

From 3010b8b42b923dc8b3a130699ef2942d334e3b41 Mon Sep 17 00:00:00 2001
From: drpatelh Date: Fri, 9 Apr 2021 16:09:44 +0100
Subject: [PATCH 055/210] Fix strings

--- nf_core/modules/lint.py | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/nf_core/modules/lint.py b/nf_core/modules/lint.py
index 91200767cb..d8eecef861 100644
--- a/nf_core/modules/lint.py
+++ b/nf_core/modules/lint.py
@@ -681,9 +681,9 @@ def lint_main_nf(self):
self.failed.append(("main_nf_meta_saveas", "'meta:meta' unspecified in saveAs function", self.main_nf))

if len(save_as) > 0 and re.search("\s*publish_by_meta\s*:\s*['id']", save_as[0]):
- self.passed.append(("main_nf_publish_meta_saveas", '"publish_by_meta:['id']" specified in saveAs function', self.main_nf))
+ self.passed.append(("main_nf_publish_meta_saveas", """"publish_by_meta:['id']" specified in saveAs function""", self.main_nf))
else:
- self.failed.append(("main_nf_publish_meta_saveas", '"publish_by_meta:['id']" unspecified in saveAs function', self.main_nf))
+ self.failed.append(("main_nf_publish_meta_saveas", """"publish_by_meta:['id']" unspecified in saveAs function""", self.main_nf))

From eb863e42634d24fd21e3decddaa7eb35b53ea6b0 Mon Sep 17 00:00:00 2001
From: Erik Danielsson Date:
Fri, 9 Apr 2021 17:10:18 +0200 Subject: [PATCH 056/210] Added additional lint tests and reformatted with black --- nf_core/lint/nextflow_config.py | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/nf_core/lint/nextflow_config.py b/nf_core/lint/nextflow_config.py index 2ad36b6056..21481d62d1 100644 --- a/nf_core/lint/nextflow_config.py +++ b/nf_core/lint/nextflow_config.py @@ -3,6 +3,7 @@ import re import os import logging + log = logging.getLogger(__name__) @@ -102,6 +103,8 @@ def nextflow_config(self): ["process.time"], ["params.outdir"], ["params.input"], + ["params.show_hidden_params"], + ["params.schema_ignore_params"], ] # Throw a warning if these are missing config_warn = [ @@ -277,12 +280,12 @@ def nextflow_config(self): if self.nf_config.get("params.custom_config_base", "").strip("'") == custom_config_base: passed.append("Config ``params.custom_config_base`` is set to ``{}``".format(custom_config_base)) else: - failed.append("Config ``params.custom_config_base`` is not set to ``{}`` {}".format(custom_config_base)) + failed.append("Config ``params.custom_config_base`` is not set to ``{}``".format(custom_config_base)) # Check that lines for loading custom profiles exist lines = [ - r'// Load nf-core custom profiles from different Institutions', - r'try {', + r"// Load nf-core custom profiles from different Institutions", + r"try {", r'includeConfig "${params.custom_config_base}/nfcore_custom.config"', r"} catch (Exception e) {", r'System.err.println("WARNING: Could not load nf-core/config profiles: ${params.custom_config_base}/nfcore_custom.config")', From 28281f55cecc9467921084dffea29af8d8a0c46d Mon Sep 17 00:00:00 2001 From: drpatelh Date: Fri, 9 Apr 2021 16:23:19 +0100 Subject: [PATCH 057/210] Fix jinja2 syntax --- nf_core/module-template/software/main.nf | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nf_core/module-template/software/main.nf b/nf_core/module-template/software/main.nf index 66d1d80e38..6d6fabc8c8 100644 --- a/nf_core/module-template/software/main.nf +++ b/nf_core/module-template/software/main.nf @@ -25,7 +25,7 @@ process {{ tool_name|upper }} { label '{{ process_label }}' publishDir "${params.outdir}", mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:{{ 'meta' if has_meta else "[:]" }}, publish_by_meta:{{ ['id'] if has_meta else "[]" }}) } + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:{{ 'meta' if has_meta else "[:]" }}, publish_by_meta:{{ "['id']" if has_meta else "[]" }}) } // TODO nf-core: List required Conda package(s). // Software MUST be pinned to channel (i.e. "bioconda"), version (i.e. "1.10"). 
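A brief aside on PATCH 056 above: besides the black reformatting, it fixes a latent bug. The removed line passed a single argument to a format string containing two `{}` replacement fields, which raises an IndexError the moment that lint-failure branch runs. A minimal, self-contained sketch of the failure mode in plain Python; the URL value is illustrative, mirroring what the surrounding lint code would build for the default `master` branch:

    # Hypothetical reproduction of the call removed in PATCH 056 -- not taken verbatim from the repo.
    custom_config_base = "https://raw.githubusercontent.com/nf-core/configs/master"

    buggy = "Config ``params.custom_config_base`` is not set to ``{}`` {}"
    try:
        buggy.format(custom_config_base)  # two replacement fields, one positional argument
    except IndexError as err:
        print(f"format() raised: {err}")  # exact message varies between Python versions

    # The corrected line pairs one field with one argument and formats cleanly.
    fixed = "Config ``params.custom_config_base`` is not set to ``{}``"
    print(fixed.format(custom_config_base))
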
From 6144dc0c2312a18cbf79b6b8928e03fdd827a8e2 Mon Sep 17 00:00:00 2001 From: drpatelh Date: Fri, 9 Apr 2021 16:36:47 +0100 Subject: [PATCH 058/210] Update regex --- nf_core/modules/lint.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/nf_core/modules/lint.py b/nf_core/modules/lint.py index d8eecef861..22f51210fc 100644 --- a/nf_core/modules/lint.py +++ b/nf_core/modules/lint.py @@ -680,10 +680,10 @@ def lint_main_nf(self): else: self.failed.append(("main_nf_meta_saveas", "'meta:meta' unspecified in saveAs function", self.main_nf)) - if len(save_as) > 0 and re.search("\s*publish_by_meta\s*:\s*['id']", save_as[0]): - self.passed.append(("main_nf_publish_meta_saveas", """"publish_by_meta:['id']" specified in saveAs function""", self.main_nf)) + if len(save_as) > 0 and re.search("\s*publish_by_meta\s*:\s*\['id'\]", save_as[0]): + self.passed.append(("main_nf_publish_meta_saveas", "'publish_by_meta:[\'id\']' specified in saveAs function", self.main_nf)) else: - self.failed.append(("main_nf_publish_meta_saveas", """"publish_by_meta:['id']" unspecified in saveAs function""", self.main_nf)) + self.failed.append(("main_nf_publish_meta_saveas", "'publish_by_meta:[\'id\']' unspecified in saveAs function", self.main_nf)) # Check that a software version is emitted if "version" in outputs: From 2d62952d32f807cbab6a8bb3ba3c059aba6d6f27 Mon Sep 17 00:00:00 2001 From: drpatelh Date: Fri, 9 Apr 2021 17:12:58 +0100 Subject: [PATCH 059/210] Fix test numbers --- tests/test_modules.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/test_modules.py b/tests/test_modules.py index b4809dd270..0f79e68989 100644 --- a/tests/test_modules.py +++ b/tests/test_modules.py @@ -83,7 +83,7 @@ def test_modules_lint_fastqc(self): self.mods.install("fastqc") module_lint = nf_core.modules.ModuleLint(dir=self.pipeline_dir) module_lint.lint(print_results=False, all_modules=True) - assert len(module_lint.passed) == 18 + assert len(module_lint.passed) == 17 assert len(module_lint.warned) == 0 assert len(module_lint.failed) == 0 From ddf809089b52a933857172018903c75444bbdd76 Mon Sep 17 00:00:00 2001 From: drpatelh Date: Fri, 9 Apr 2021 17:24:48 +0100 Subject: [PATCH 060/210] Adjust lint numbers --- tests/test_modules.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/test_modules.py b/tests/test_modules.py index 0f79e68989..f1aa12b501 100644 --- a/tests/test_modules.py +++ b/tests/test_modules.py @@ -83,7 +83,7 @@ def test_modules_lint_fastqc(self): self.mods.install("fastqc") module_lint = nf_core.modules.ModuleLint(dir=self.pipeline_dir) module_lint.lint(print_results=False, all_modules=True) - assert len(module_lint.passed) == 17 + assert len(module_lint.passed) == 19 assert len(module_lint.warned) == 0 assert len(module_lint.failed) == 0 From 47d616bad010bab0c0302e5e1217fdc5a273d574 Mon Sep 17 00:00:00 2001 From: drpatelh Date: Fri, 9 Apr 2021 17:31:40 +0100 Subject: [PATCH 061/210] Fix black --- nf_core/modules/lint.py | 16 ++++++++++++++-- 1 file changed, 14 insertions(+), 2 deletions(-) diff --git a/nf_core/modules/lint.py b/nf_core/modules/lint.py index 22f51210fc..06a9432a12 100644 --- a/nf_core/modules/lint.py +++ b/nf_core/modules/lint.py @@ -681,9 +681,21 @@ def lint_main_nf(self): self.failed.append(("main_nf_meta_saveas", "'meta:meta' unspecified in saveAs function", self.main_nf)) if len(save_as) > 0 and re.search("\s*publish_by_meta\s*:\s*\['id'\]", save_as[0]): - self.passed.append(("main_nf_publish_meta_saveas", "'publish_by_meta:[\'id\']' 
specified in saveAs function", self.main_nf)) + self.passed.append( + ( + "main_nf_publish_meta_saveas", + "'publish_by_meta:['id']' specified in saveAs function", + self.main_nf, + ) + ) else: - self.failed.append(("main_nf_publish_meta_saveas", "'publish_by_meta:[\'id\']' unspecified in saveAs function", self.main_nf)) + self.failed.append( + ( + "main_nf_publish_meta_saveas", + "'publish_by_meta:['id']' unspecified in saveAs function", + self.main_nf, + ) + ) # Check that a software version is emitted if "version" in outputs: From 25d222a24f0ff6fde0579fb883b0529123af54f2 Mon Sep 17 00:00:00 2001 From: Erik Danielsson Date: Mon, 12 Apr 2021 09:06:00 +0200 Subject: [PATCH 062/210] Improved error message for missing lines --- nf_core/lint/nextflow_config.py | 14 +++++++++----- 1 file changed, 9 insertions(+), 5 deletions(-) diff --git a/nf_core/lint/nextflow_config.py b/nf_core/lint/nextflow_config.py index 21481d62d1..31bcaed64e 100644 --- a/nf_core/lint/nextflow_config.py +++ b/nf_core/lint/nextflow_config.py @@ -270,17 +270,17 @@ def nextflow_config(self): # Check if custom profile params are set correctly if self.nf_config.get("params.custom_config_version", "").strip("'") == "master": - passed.append("Config ``params.custom_config_version`` is set to ``master``") + passed.append("Config `params.custom_config_version` is set to `master`") else: - failed.append("Config ``params.custom_config_version`` is not set to ``master``") + failed.append("Config `params.custom_config_version` is not set to `master`") custom_config_base = "https://raw.githubusercontent.com/nf-core/configs/" + self.nf_config.get( "params.custom_config_version", "" ).strip("'") if self.nf_config.get("params.custom_config_base", "").strip("'") == custom_config_base: - passed.append("Config ``params.custom_config_base`` is set to ``{}``".format(custom_config_base)) + passed.append("Config `params.custom_config_base` is set to `{}`".format(custom_config_base)) else: - failed.append("Config ``params.custom_config_base`` is not set to ``{}``".format(custom_config_base)) + failed.append("Config `params.custom_config_base` is not set to `{}`".format(custom_config_base)) # Check that lines for loading custom profiles exist lines = [ @@ -304,7 +304,11 @@ def nextflow_config(self): if i == len(lines): passed.append("Lines for loading custom profiles found") else: - failed.append("Unable to find lines for loading custom profiles") + lines[2] = "\t" + lines[2] + lines[4] = "\t" + lines[4] + failed.append( + "Lines for loading custom profiles not found. File should contain: ```groovy\n{}".format("\n".join(lines)) + ) for config in ignore_configs: ignored.append("Config ignored: {}".format(self._wrap_quotes(config))) From 86e8c8b6070b6bd48171d2811443add1bc1f5bc6 Mon Sep 17 00:00:00 2001 From: Erik Danielsson Date: Sat, 10 Apr 2021 18:09:47 +0200 Subject: [PATCH 063/210] Added gh actions for base image --- .github/workflows/push_dockerhub_dev.yml | 12 ++++++++++-- .github/workflows/push_dockerhub_release.yml | 14 ++++++++++++-- 2 files changed, 22 insertions(+), 4 deletions(-) diff --git a/.github/workflows/push_dockerhub_dev.yml b/.github/workflows/push_dockerhub_dev.yml index a676fdee3e..6143a9f424 100644 --- a/.github/workflows/push_dockerhub_dev.yml +++ b/.github/workflows/push_dockerhub_dev.yml @@ -19,10 +19,18 @@ jobs: - name: Check out tools code uses: actions/checkout@v2 - - name: Build new docker image + - name: Build new docker image for tools run: docker build --no-cache . 
-t nf-core/tools:dev -f tools.Dockerfile - - name: Push Docker image to DockerHub (dev) + - name: Push tools Docker image to DockerHub (dev) run: | echo "$DOCKERHUB_PASS" | docker login -u "$DOCKERHUB_USERNAME" --password-stdin docker push nf-core/tools:dev + + - name: Build new base docker image for templates + run: docker build --no-cache . -t nf-core/base:dev -f base.Dockerfile + + - name: Push base Docker image to DockerHub (dev) + run: | + echo "$DOCKERHUB_PASS" | docker login -u "$DOCKERHUB_USERNAME" --password-stdin + docker push nf-core/base:dev diff --git a/.github/workflows/push_dockerhub_release.yml b/.github/workflows/push_dockerhub_release.yml index df622cd75d..3cb3eac38a 100644 --- a/.github/workflows/push_dockerhub_release.yml +++ b/.github/workflows/push_dockerhub_release.yml @@ -18,12 +18,22 @@ jobs: - name: Check out tools code uses: actions/checkout@v2 - - name: Build new docker image + - name: Build new docker image for tools run: docker build --no-cache . -t nf-core/tools:latest -f tools.Dockerfile - - name: Push Docker image to DockerHub (release) + - name: Push tools Docker image to DockerHub (release) run: | echo "$DOCKERHUB_PASS" | docker login -u "$DOCKERHUB_USERNAME" --password-stdin docker push nf-core/tools:latest docker tag nf-core/tools:latest nf-core/tools:${{ github.event.release.tag_name }} docker push nf-core/tools:${{ github.event.release.tag_name }} + + - name: Build new base docker image for templates + run: docker build --no-cache . -t nf-core/base:latest -f base.Dockerfile + + - name: Push base Docker image to DockerHub (release) + run: | + echo "$DOCKERHUB_PASS" | docker login -u "$DOCKERHUB_USERNAME" --password-stdin + docker push nf-core/base:latest + docker tag nf-core/base:latest nf-core/base:${{ github.event.release.tag_name }} + docker push nf-core/base:${{ github.event.release.tag_name }} From 33cfde12de11bef5e313e4bf04620f3c0536d720 Mon Sep 17 00:00:00 2001 From: Erik Danielsson Date: Mon, 12 Apr 2021 11:37:37 +0200 Subject: [PATCH 064/210] Parallellize actions with matrix --- .github/workflows/push_dockerhub_dev.yml | 18 +++++-------- .github/workflows/push_dockerhub_release.yml | 27 ++++++++------------ 2 files changed, 17 insertions(+), 28 deletions(-) diff --git a/.github/workflows/push_dockerhub_dev.yml b/.github/workflows/push_dockerhub_dev.yml index 6143a9f424..b9080cc109 100644 --- a/.github/workflows/push_dockerhub_dev.yml +++ b/.github/workflows/push_dockerhub_dev.yml @@ -15,22 +15,18 @@ jobs: env: DOCKERHUB_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }} DOCKERHUB_PASS: ${{ secrets.DOCKERHUB_PASS }} + strategy: + matrix: + image: [base, tools] steps: - name: Check out tools code uses: actions/checkout@v2 - - name: Build new docker image for tools - run: docker build --no-cache . -t nf-core/tools:dev -f tools.Dockerfile + - name: Build new docker image + run: docker build --no-cache . -t nf-core/${{ matrix.image }}:dev -f ${{ matrix.image }}.Dockerfile - - name: Push tools Docker image to DockerHub (dev) + - name: Push Docker image to DockerHub (dev) run: | echo "$DOCKERHUB_PASS" | docker login -u "$DOCKERHUB_USERNAME" --password-stdin - docker push nf-core/tools:dev + docker push nf-core/${{ matrix.image }}:dev - - name: Build new base docker image for templates - run: docker build --no-cache . 
-t nf-core/base:dev -f base.Dockerfile - - - name: Push base Docker image to DockerHub (dev) - run: | - echo "$DOCKERHUB_PASS" | docker login -u "$DOCKERHUB_USERNAME" --password-stdin - docker push nf-core/base:dev diff --git a/.github/workflows/push_dockerhub_release.yml b/.github/workflows/push_dockerhub_release.yml index 3cb3eac38a..21c6c7b1b2 100644 --- a/.github/workflows/push_dockerhub_release.yml +++ b/.github/workflows/push_dockerhub_release.yml @@ -14,26 +14,19 @@ jobs: env: DOCKERHUB_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }} DOCKERHUB_PASS: ${{ secrets.DOCKERHUB_PASS }} + strategy: + matrix: + image: [base, tools] steps: - - name: Check out tools code + - name: Check out code uses: actions/checkout@v2 - - name: Build new docker image for tools - run: docker build --no-cache . -t nf-core/tools:latest -f tools.Dockerfile + - name: Build new docker image + run: docker build --no-cache . -t nf-core/${{ matrix.image }}:latest -f ${{ matrix.image }}.Dockerfile - - name: Push tools Docker image to DockerHub (release) + - name: Push Docker image to DockerHub (release) run: | echo "$DOCKERHUB_PASS" | docker login -u "$DOCKERHUB_USERNAME" --password-stdin - docker push nf-core/tools:latest - docker tag nf-core/tools:latest nf-core/tools:${{ github.event.release.tag_name }} - docker push nf-core/tools:${{ github.event.release.tag_name }} - - - name: Build new base docker image for templates - run: docker build --no-cache . -t nf-core/base:latest -f base.Dockerfile - - - name: Push base Docker image to DockerHub (release) - run: | - echo "$DOCKERHUB_PASS" | docker login -u "$DOCKERHUB_USERNAME" --password-stdin - docker push nf-core/base:latest - docker tag nf-core/base:latest nf-core/base:${{ github.event.release.tag_name }} - docker push nf-core/base:${{ github.event.release.tag_name }} + docker push nf-core/${{ matrix.image }}:latest + docker tag nf-core/${{ matrix.image }}:latest nf-core/${{ matrix.image }}:${{ github.event.release.tag_name }} + docker push nf-core/${{ matrix.image }}:${{ github.event.release.tag_name }} \ No newline at end of file From 5f660cdbe35a96922fae36a6aa3fbf3d30f6a6d3 Mon Sep 17 00:00:00 2001 From: Erik Danielsson <53212377+ErikDanielsson@users.noreply.github.com> Date: Mon, 12 Apr 2021 12:58:55 +0200 Subject: [PATCH 065/210] Newline on EOF Co-authored-by: Phil Ewels --- tools.Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tools.Dockerfile b/tools.Dockerfile index 74c82a6a01..89974f7c63 100644 --- a/tools.Dockerfile +++ b/tools.Dockerfile @@ -35,4 +35,4 @@ RUN python -m pip install . # Set up entrypoint and cmd for easy docker usage ENTRYPOINT [ "nf-core" ] -CMD [ "." ] \ No newline at end of file +CMD [ "." 
] From 679229023e9dba286492037726e8525ad27edb32 Mon Sep 17 00:00:00 2001 From: Erik Danielsson <53212377+ErikDanielsson@users.noreply.github.com> Date: Mon, 12 Apr 2021 13:01:54 +0200 Subject: [PATCH 066/210] Add strings with format Co-authored-by: Phil Ewels --- nf_core/lint/nextflow_config.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/nf_core/lint/nextflow_config.py b/nf_core/lint/nextflow_config.py index 31bcaed64e..c525fd6789 100644 --- a/nf_core/lint/nextflow_config.py +++ b/nf_core/lint/nextflow_config.py @@ -274,9 +274,9 @@ def nextflow_config(self): else: failed.append("Config `params.custom_config_version` is not set to `master`") - custom_config_base = "https://raw.githubusercontent.com/nf-core/configs/" + self.nf_config.get( + custom_config_base = "https://raw.githubusercontent.com/nf-core/configs/{}".format(self.nf_config.get( "params.custom_config_version", "" - ).strip("'") + ).strip("'")) if self.nf_config.get("params.custom_config_base", "").strip("'") == custom_config_base: passed.append("Config `params.custom_config_base` is set to `{}`".format(custom_config_base)) else: From 9d11806acbbd1a4dbb54de1eb8e872ad13a6d1ac Mon Sep 17 00:00:00 2001 From: Erik Danielsson <53212377+ErikDanielsson@users.noreply.github.com> Date: Mon, 12 Apr 2021 13:04:10 +0200 Subject: [PATCH 067/210] Formatting with f-string Co-authored-by: Phil Ewels --- nf_core/lint/nextflow_config.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/nf_core/lint/nextflow_config.py b/nf_core/lint/nextflow_config.py index c525fd6789..8a6bfae754 100644 --- a/nf_core/lint/nextflow_config.py +++ b/nf_core/lint/nextflow_config.py @@ -304,8 +304,8 @@ def nextflow_config(self): if i == len(lines): passed.append("Lines for loading custom profiles found") else: - lines[2] = "\t" + lines[2] - lines[4] = "\t" + lines[4] + lines[2] = f"\t{lines[2]}" + lines[4] = f"\t{lines[4]}" failed.append( "Lines for loading custom profiles not found. 
File should contain: ```groovy\n{}".format("\n".join(lines))
)

From e6aefebdf6c1bca5ed8a2f639a874e5a9f302594 Mon Sep 17 00:00:00 2001
From: Erik Danielsson Date: Mon, 12 Apr 2021 13:17:29 +0200
Subject: [PATCH 068/210] black

--- nf_core/lint/nextflow_config.py | 6 +++---
1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/nf_core/lint/nextflow_config.py b/nf_core/lint/nextflow_config.py
index 8a6bfae754..1a7ac44d9d 100644
--- a/nf_core/lint/nextflow_config.py
+++ b/nf_core/lint/nextflow_config.py
@@ -274,9 +274,9 @@ def nextflow_config(self):
else:
failed.append("Config `params.custom_config_version` is not set to `master`")

- custom_config_base = "https://raw.githubusercontent.com/nf-core/configs/{}".format(self.nf_config.get(
- "params.custom_config_version", ""
- ).strip("'"))
+ custom_config_base = "https://raw.githubusercontent.com/nf-core/configs/{}".format(
+ self.nf_config.get("params.custom_config_version", "").strip("'")
+ )
if self.nf_config.get("params.custom_config_base", "").strip("'") == custom_config_base:
passed.append("Config `params.custom_config_base` is set to `{}`".format(custom_config_base))
else:

From 9d25d377b734d2d9277e26d38db14dde67ba96de Mon Sep 17 00:00:00 2001
From: Phil Ewels Date: Mon, 12 Apr 2021 13:51:29 +0200
Subject: [PATCH 069/210] Minor whitespace

--- .github/workflows/push_dockerhub_release.yml | 2 +-
tools.Dockerfile | 2 --
2 files changed, 1 insertion(+), 3 deletions(-)

diff --git a/.github/workflows/push_dockerhub_release.yml b/.github/workflows/push_dockerhub_release.yml
index 21c6c7b1b2..abbb911adc 100644
--- a/.github/workflows/push_dockerhub_release.yml
+++ b/.github/workflows/push_dockerhub_release.yml
@@ -29,4 +29,4 @@ jobs:
echo "$DOCKERHUB_PASS" | docker login -u "$DOCKERHUB_USERNAME" --password-stdin
docker push nf-core/${{ matrix.image }}:latest
docker tag nf-core/${{ matrix.image }}:latest nf-core/${{ matrix.image }}:${{ github.event.release.tag_name }}
- docker push nf-core/${{ matrix.image }}:${{ github.event.release.tag_name }} \ No newline at end of file
+ docker push nf-core/${{ matrix.image }}:${{ github.event.release.tag_name }}

diff --git a/tools.Dockerfile b/tools.Dockerfile
index 89974f7c63..ac918c37c5 100644
--- a/tools.Dockerfile
+++ b/tools.Dockerfile
@@ -24,8 +24,6 @@ RUN mkdir -p /usr/share/man/man1 \
# Install Nextflow
RUN curl -s https://get.nextflow.io | bash \
&& mv nextflow /usr/local/bin
-
-
# Add the nf-core source files to the image
COPY . /usr/src/nf_core
WORKDIR /usr/src/nf_core

From 43ae3e7c721dd260798c17d183efae68b10a26a9 Mon Sep 17 00:00:00 2001
From: Erik Danielsson Date: Mon, 12 Apr 2021 14:27:31 +0200
Subject: [PATCH 070/210] Docs for new linting tests

--- nf_core/lint/nextflow_config.py | 17 +++++++++++++++
1 file changed, 17 insertions(+)

diff --git a/nf_core/lint/nextflow_config.py b/nf_core/lint/nextflow_config.py
index 1a7ac44d9d..dd96b2b477 100644
--- a/nf_core/lint/nextflow_config.py
+++ b/nf_core/lint/nextflow_config.py
@@ -55,6 +55,23 @@ def nextflow_config(self):

* ``params.input``: Input data that is not NGS sequencing data

+ * ``params.custom_config_version``
+
+ * Should always be set to the default value ``master``
+
+ * ``params.custom_config_base``
+
+ * Should always be set to the default value:
+ ``https://raw.githubusercontent.com/nf-core/configs/${params.custom_config_version}``
+
+ * ``params.show_hidden_params``
+
+ * Determines whether boilerplate params are shown by the schema.
Set to ``false`` by default + + * ``params.schema_ignore_params`` + + * A comma separated string of inputs the schema validation should ignore. + **The following variables throw warnings if missing:** * ``manifest.mainScript``: The filename of the main pipeline script (should be ``main.nf``) From 04cef480fd09154c685fe895cf7132c7aa186216 Mon Sep 17 00:00:00 2001 From: kevinmenden Date: Tue, 13 Apr 2021 09:26:24 +0200 Subject: [PATCH 071/210] remove the "tool_subtool" tag from create-test-yml --- nf_core/modules/test_yml_builder.py | 1 - 1 file changed, 1 deletion(-) diff --git a/nf_core/modules/test_yml_builder.py b/nf_core/modules/test_yml_builder.py index 255e1ecf64..870f63c30b 100644 --- a/nf_core/modules/test_yml_builder.py +++ b/nf_core/modules/test_yml_builder.py @@ -171,7 +171,6 @@ def build_single_test(self, entry_point): tag_defaults = [] for idx in range(0, len(mod_name_parts)): tag_defaults.append("/".join(mod_name_parts[: idx + 1])) - tag_defaults.append(entry_point.replace("test_", "")) # Remove duplicates tag_defaults = list(set(tag_defaults)) if self.no_prompts: From 4b86d1f1f213c8cd9bb33292e6adcc9a8f47d97d Mon Sep 17 00:00:00 2001 From: drpatelh Date: Tue, 13 Apr 2021 12:45:56 +0100 Subject: [PATCH 072/210] Update regex used for --max_* params in Schema --- nf_core/pipeline-template/nextflow_schema.json | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/nf_core/pipeline-template/nextflow_schema.json b/nf_core/pipeline-template/nextflow_schema.json index 786de002bb..fa5e61aac3 100644 --- a/nf_core/pipeline-template/nextflow_schema.json +++ b/nf_core/pipeline-template/nextflow_schema.json @@ -183,7 +183,7 @@ "description": "Maximum amount of memory that can be requested for any single job.", "default": "128.GB", "fa_icon": "fas fa-memory", - "pattern": "^[\\d\\.]+\\s*.(K|M|G|T)?B$", + "pattern": "^\\d+(\\.\\d+)?\\.?\\s*(K|M|G|T)?B$", "hidden": true, "help_text": "Use to set an upper-limit for the memory requirement for each process. Should be a string in the format integer-unit e.g. `--max_memory '8.GB'`" }, @@ -192,7 +192,7 @@ "description": "Maximum amount of time that can be requested for any single job.", "default": "240.h", "fa_icon": "far fa-clock", - "pattern": "^(\\d+(\\.\\d+)?(?:\\s*|\\.?)(s|m|h|d)\\s*)+$", + "pattern": "^\\d+(\\.\\d+)?\\.?\\s*(s|m|h|day)$", "hidden": true, "help_text": "Use to set an upper-limit for the time requirement for each process. Should be a string in the format integer-unit e.g. `--max_time '2.h'`" } From 551f254815a0432bcf04aa38ff8c62e3bce93c93 Mon Sep 17 00:00:00 2001 From: Phil Ewels Date: Tue, 13 Apr 2021 14:23:21 +0200 Subject: [PATCH 073/210] Harder better faster stronger time regex --- nf_core/pipeline-template/nextflow_schema.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nf_core/pipeline-template/nextflow_schema.json b/nf_core/pipeline-template/nextflow_schema.json index fa5e61aac3..c712a56f5a 100644 --- a/nf_core/pipeline-template/nextflow_schema.json +++ b/nf_core/pipeline-template/nextflow_schema.json @@ -192,7 +192,7 @@ "description": "Maximum amount of time that can be requested for any single job.", "default": "240.h", "fa_icon": "far fa-clock", - "pattern": "^\\d+(\\.\\d+)?\\.?\\s*(s|m|h|day)$", + "pattern": "^(\\d+\\.?\\s*(s|m|h|day)\\s*)+$", "hidden": true, "help_text": "Use to set an upper-limit for the time requirement for each process. Should be a string in the format integer-unit e.g. 
`--max_time '2.h'`" } From d3a10bad453f0a58de9830bf2df0ed9b09ab8f03 Mon Sep 17 00:00:00 2001 From: drpatelh Date: Tue, 13 Apr 2021 13:24:28 +0100 Subject: [PATCH 074/210] Update --max_time regex --- nf_core/pipeline-template/nextflow_schema.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nf_core/pipeline-template/nextflow_schema.json b/nf_core/pipeline-template/nextflow_schema.json index fa5e61aac3..c712a56f5a 100644 --- a/nf_core/pipeline-template/nextflow_schema.json +++ b/nf_core/pipeline-template/nextflow_schema.json @@ -192,7 +192,7 @@ "description": "Maximum amount of time that can be requested for any single job.", "default": "240.h", "fa_icon": "far fa-clock", - "pattern": "^\\d+(\\.\\d+)?\\.?\\s*(s|m|h|day)$", + "pattern": "^(\\d+\\.?\\s*(s|m|h|day)\\s*)+$", "hidden": true, "help_text": "Use to set an upper-limit for the time requirement for each process. Should be a string in the format integer-unit e.g. `--max_time '2.h'`" } From 8f284d1624a32e2bc48b4f3b204a97a4ba07f76b Mon Sep 17 00:00:00 2001 From: Erik Danielsson Date: Tue, 13 Apr 2021 14:35:01 +0200 Subject: [PATCH 075/210] Wrote gh actions for ftp sync --- ...ls-api-docs.yml => tools-api-docs-dev.yml} | 28 ++++++------- .github/workflows/tools-api-docs-release.yml | 42 +++++++++++++++++++ 2 files changed, 54 insertions(+), 16 deletions(-) rename .github/workflows/{tools-api-docs.yml => tools-api-docs-dev.yml} (51%) create mode 100644 .github/workflows/tools-api-docs-release.yml diff --git a/.github/workflows/tools-api-docs.yml b/.github/workflows/tools-api-docs-dev.yml similarity index 51% rename from .github/workflows/tools-api-docs.yml rename to .github/workflows/tools-api-docs-dev.yml index c978ae1e44..afd24b7bc0 100644 --- a/.github/workflows/tools-api-docs.yml +++ b/.github/workflows/tools-api-docs-dev.yml @@ -1,7 +1,7 @@ -name: nf-core/tools API docs +name: nf-core/tools dev API docs on: push: - branches: [master, dev] + branches: [dev] jobs: api-docs: @@ -26,18 +26,14 @@ jobs: - name: Build HTML docs run: make --directory ./docs/api html - - name: Push docs to api-doc branch + - name: Sync dev docs if: github.repository == 'nf-core/tools' - run: | - git checkout --orphan api-doc - git rm -r --cache . 
- rm .gitignore - git config user.email "core@nf-co.re" - git config user.name "nf-core-bot" - git add docs - git commit --message "nf-core/tools docs build - - $GITHUB_REF - $GITHUB_SHA - " - git remote add nf-core https://github.com/nf-core/tools.git - git push --force --set-upstream nf-core api-doc + uses: SamKirkland/FTP-Deploy-Action@4.0.0 + with: + server: nf-co.re + username: nf-core + password: ${{ secrets.ftp_password }} + local-dir: docs + server-dir: public_html/docs/dev/ + protocol: ftps + port: 1234 # I think we need to specify the port if we are using ftps diff --git a/.github/workflows/tools-api-docs-release.yml b/.github/workflows/tools-api-docs-release.yml new file mode 100644 index 0000000000..de66590298 --- /dev/null +++ b/.github/workflows/tools-api-docs-release.yml @@ -0,0 +1,42 @@ +name: nf-core/tools release API docs +on: + release: + types: [published] + +jobs: + api-docs: + name: Build & push Sphinx API docs + runs-on: ubuntu-18.04 + strategy: + matrix: + dir: [latest, $GITHUB_REF] + + steps: + - name: Check out source-code repository + uses: actions/checkout@v2 + + - name: Set up Python 3.7 + uses: actions/setup-python@v1 + with: + python-version: 3.7 + + - name: Install python dependencies + run: | + pip install --upgrade pip + pip install -r ./docs/api/requirements.txt + pip install . + + - name: Build HTML docs + run: make --directory ./docs/api html + + - name: Sync release docs + if: github.repository == 'nf-core/tools' + uses: SamKirkland/FTP-Deploy-Action@4.0.0 + with: + server: nf-co.re + username: nf-core + password: ${{ secrets.ftp_password }} + local-dir: docs + server-dir: public_html/docs/${{ matrix.dir }}/ + protocol: ftps + port: 1234 # I think we need to specify the port if we are using ftps From eb1a3b0516a34a00b4e4a174cc659dc3af05638c Mon Sep 17 00:00:00 2001 From: Erik Danielsson Date: Tue, 13 Apr 2021 14:46:56 +0200 Subject: [PATCH 076/210] Make more things secret --- .github/workflows/tools-api-docs-dev.yml | 10 +++++----- .github/workflows/tools-api-docs-release.yml | 10 +++++----- 2 files changed, 10 insertions(+), 10 deletions(-) diff --git a/.github/workflows/tools-api-docs-dev.yml b/.github/workflows/tools-api-docs-dev.yml index afd24b7bc0..5cdf47e7e4 100644 --- a/.github/workflows/tools-api-docs-dev.yml +++ b/.github/workflows/tools-api-docs-dev.yml @@ -30,10 +30,10 @@ jobs: if: github.repository == 'nf-core/tools' uses: SamKirkland/FTP-Deploy-Action@4.0.0 with: - server: nf-co.re - username: nf-core + server: ${{ secrets.server }} + username: ${{ secrets.username}} password: ${{ secrets.ftp_password }} local-dir: docs - server-dir: public_html/docs/dev/ - protocol: ftps - port: 1234 # I think we need to specify the port if we are using ftps + server-dir: ${{ secrets.server_dir }}/dev + protocol: ${{ secrets.protocol }} + port: ${{ secrets.port }} diff --git a/.github/workflows/tools-api-docs-release.yml b/.github/workflows/tools-api-docs-release.yml index de66590298..7992f25e1b 100644 --- a/.github/workflows/tools-api-docs-release.yml +++ b/.github/workflows/tools-api-docs-release.yml @@ -33,10 +33,10 @@ jobs: if: github.repository == 'nf-core/tools' uses: SamKirkland/FTP-Deploy-Action@4.0.0 with: - server: nf-co.re - username: nf-core + server: ${{ secrets.server }} + username: ${{ secrets.username}} password: ${{ secrets.ftp_password }} local-dir: docs - server-dir: public_html/docs/${{ matrix.dir }}/ - protocol: ftps - port: 1234 # I think we need to specify the port if we are using ftps + server-dir: ${{ secrets.server_dir }}/${{ 
matrix.dir }} + protocol: ${{ secrets.protocol }} + port: ${{ secrets.port }} From bf920e0b61a8fb4ad404aeecf62d0b7d707dab67 Mon Sep 17 00:00:00 2001 From: Erik Danielsson Date: Tue, 13 Apr 2021 14:51:04 +0200 Subject: [PATCH 077/210] prefix ftp_ --- .github/workflows/tools-api-docs-dev.yml | 10 +++++----- .github/workflows/tools-api-docs-release.yml | 10 +++++----- 2 files changed, 10 insertions(+), 10 deletions(-) diff --git a/.github/workflows/tools-api-docs-dev.yml b/.github/workflows/tools-api-docs-dev.yml index 5cdf47e7e4..4f940833ef 100644 --- a/.github/workflows/tools-api-docs-dev.yml +++ b/.github/workflows/tools-api-docs-dev.yml @@ -30,10 +30,10 @@ jobs: if: github.repository == 'nf-core/tools' uses: SamKirkland/FTP-Deploy-Action@4.0.0 with: - server: ${{ secrets.server }} - username: ${{ secrets.username}} + server: ${{ secrets.ftp_server }} + username: ${{ secrets.ftp_username}} password: ${{ secrets.ftp_password }} local-dir: docs - server-dir: ${{ secrets.server_dir }}/dev - protocol: ${{ secrets.protocol }} - port: ${{ secrets.port }} + server-dir: ${{ secrets.ftp_server_dir }}/dev/ + protocol: ${{ secrets.ftp_protocol }} + port: ${{ secrets.ftp_port }} diff --git a/.github/workflows/tools-api-docs-release.yml b/.github/workflows/tools-api-docs-release.yml index 7992f25e1b..dd10197c06 100644 --- a/.github/workflows/tools-api-docs-release.yml +++ b/.github/workflows/tools-api-docs-release.yml @@ -33,10 +33,10 @@ jobs: if: github.repository == 'nf-core/tools' uses: SamKirkland/FTP-Deploy-Action@4.0.0 with: - server: ${{ secrets.server }} - username: ${{ secrets.username}} + server: ${{ secrets.ftp_server }} + username: ${{ secrets.ftp_username}} password: ${{ secrets.ftp_password }} local-dir: docs - server-dir: ${{ secrets.server_dir }}/${{ matrix.dir }} - protocol: ${{ secrets.protocol }} - port: ${{ secrets.port }} + server-dir: ${{ secrets.ftp_server_dir }}/${{ matrix.dir }}/ + protocol: ${{ secrets.ftp_protocol }} + port: ${{ secrets.ftp_port }} From c10dfbbb964a71c7b2dc25fb9848fce67e92bf10 Mon Sep 17 00:00:00 2001 From: Phil Ewels Date: Tue, 13 Apr 2021 23:28:29 +0200 Subject: [PATCH 078/210] Apply suggestions from code review --- .github/workflows/tools-api-docs-dev.yml | 3 ++- .github/workflows/tools-api-docs-release.yml | 3 ++- 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/.github/workflows/tools-api-docs-dev.yml b/.github/workflows/tools-api-docs-dev.yml index 4f940833ef..3a3d12e12e 100644 --- a/.github/workflows/tools-api-docs-dev.yml +++ b/.github/workflows/tools-api-docs-dev.yml @@ -33,7 +33,8 @@ jobs: server: ${{ secrets.ftp_server }} username: ${{ secrets.ftp_username}} password: ${{ secrets.ftp_password }} - local-dir: docs + local-dir: './docs/' server-dir: ${{ secrets.ftp_server_dir }}/dev/ protocol: ${{ secrets.ftp_protocol }} port: ${{ secrets.ftp_port }} + dangerous-clean-slate: true diff --git a/.github/workflows/tools-api-docs-release.yml b/.github/workflows/tools-api-docs-release.yml index dd10197c06..0db3322dea 100644 --- a/.github/workflows/tools-api-docs-release.yml +++ b/.github/workflows/tools-api-docs-release.yml @@ -36,7 +36,8 @@ jobs: server: ${{ secrets.ftp_server }} username: ${{ secrets.ftp_username}} password: ${{ secrets.ftp_password }} - local-dir: docs + local-dir: './docs/' server-dir: ${{ secrets.ftp_server_dir }}/${{ matrix.dir }}/ protocol: ${{ secrets.ftp_protocol }} port: ${{ secrets.ftp_port }} + dangerous-clean-slate: true From 00f9d057262fbebc369994e4ff57fc457481bbe1 Mon Sep 17 00:00:00 2001 From: Phil Ewels Date: 
Wed, 14 Apr 2021 00:05:49 +0200 Subject: [PATCH 079/210] Clean up new tools docs FTP push --- .github/workflows/tools-api-docs-dev.yml | 2 +- .github/workflows/tools-api-docs-release.yml | 2 +- docs/api/_src/conf.py | 2 ++ 3 files changed, 4 insertions(+), 2 deletions(-) diff --git a/.github/workflows/tools-api-docs-dev.yml b/.github/workflows/tools-api-docs-dev.yml index 3a3d12e12e..c99efc93f9 100644 --- a/.github/workflows/tools-api-docs-dev.yml +++ b/.github/workflows/tools-api-docs-dev.yml @@ -33,7 +33,7 @@ jobs: server: ${{ secrets.ftp_server }} username: ${{ secrets.ftp_username}} password: ${{ secrets.ftp_password }} - local-dir: './docs/' + local-dir: './docs/api/_build/html/' server-dir: ${{ secrets.ftp_server_dir }}/dev/ protocol: ${{ secrets.ftp_protocol }} port: ${{ secrets.ftp_port }} diff --git a/.github/workflows/tools-api-docs-release.yml b/.github/workflows/tools-api-docs-release.yml index 0db3322dea..f2eb7c12f7 100644 --- a/.github/workflows/tools-api-docs-release.yml +++ b/.github/workflows/tools-api-docs-release.yml @@ -36,7 +36,7 @@ jobs: server: ${{ secrets.ftp_server }} username: ${{ secrets.ftp_username}} password: ${{ secrets.ftp_password }} - local-dir: './docs/' + local-dir: './docs/api/_build/html/' server-dir: ${{ secrets.ftp_server_dir }}/${{ matrix.dir }}/ protocol: ${{ secrets.ftp_protocol }} port: ${{ secrets.ftp_port }} diff --git a/docs/api/_src/conf.py b/docs/api/_src/conf.py index 68023db2f3..25bcf1bf07 100644 --- a/docs/api/_src/conf.py +++ b/docs/api/_src/conf.py @@ -102,6 +102,8 @@ # # html_sidebars = {} +# Don't copy the .rst output into the build +html_copy_source = False # -- Options for HTMLHelp output --------------------------------------------- From 2542c8d42467cc137b033e96adf64c69fdc727ea Mon Sep 17 00:00:00 2001 From: Phil Ewels Date: Wed, 14 Apr 2021 00:07:42 +0200 Subject: [PATCH 080/210] Changelog --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 01325e390e..4170955cc9 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -9,6 +9,7 @@ * Added `--conda-name` flag to `nf-core modules create` command to allow sidestepping questionary [[#988](https://github.com/nf-core/tools/issues/988)] * Extended `nf-core modules lint` functionality to check tags in `test.yml` and to look for a entry in the `pytest_software.yml` file * Update `modules` commands to use new test tag format `tool/subtool` +* Rewrite how the tools documentation is deployed to the website, to allow multiple versions ### Template From d70e2dccea44c21a3b438f9afda1c4f85394dfb0 Mon Sep 17 00:00:00 2001 From: Phil Ewels Date: Wed, 14 Apr 2021 00:41:22 +0200 Subject: [PATCH 081/210] Tidy + reorganise tools lint docs --- docs/api/_src/conf.py | 6 +-- docs/api/_src/index.rst | 11 ++--- .../actions_awsfulltest.rst | 0 .../actions_awstest.rst | 0 .../actions_ci.rst | 0 .../actions_schema_validation.rst | 0 .../conda_dockerfile.rst | 0 .../conda_env_yaml.rst | 0 .../files_exist.rst | 1 + .../files_unchanged.rst | 0 .../index.rst | 2 +- .../merge_markers.rst | 0 .../nextflow_config.rst | 0 .../pipeline_name_conventions.rst | 0 .../pipeline_todos.rst | 0 .../readme.rst | 0 .../schema_lint.rst | 0 .../schema_params.rst | 0 .../template_strings.rst | 0 .../version_consistency.rst | 0 docs/api/make_lint_rst.py | 2 +- nf_core/lint/actions_schema_validation.py | 6 +-- nf_core/lint/files_exist.py | 41 +++++++++++-------- 23 files changed, 37 insertions(+), 32 deletions(-) rename docs/api/_src/{lint_tests => pipeline_lint_tests}/actions_awsfulltest.rst 
(100%) rename docs/api/_src/{lint_tests => pipeline_lint_tests}/actions_awstest.rst (100%) rename docs/api/_src/{lint_tests => pipeline_lint_tests}/actions_ci.rst (100%) rename docs/api/_src/{lint_tests => pipeline_lint_tests}/actions_schema_validation.rst (100%) rename docs/api/_src/{lint_tests => pipeline_lint_tests}/conda_dockerfile.rst (100%) rename docs/api/_src/{lint_tests => pipeline_lint_tests}/conda_env_yaml.rst (100%) rename docs/api/_src/{lint_tests => pipeline_lint_tests}/files_exist.rst (98%) rename docs/api/_src/{lint_tests => pipeline_lint_tests}/files_unchanged.rst (100%) rename docs/api/_src/{lint_tests => pipeline_lint_tests}/index.rst (85%) rename docs/api/_src/{lint_tests => pipeline_lint_tests}/merge_markers.rst (100%) rename docs/api/_src/{lint_tests => pipeline_lint_tests}/nextflow_config.rst (100%) rename docs/api/_src/{lint_tests => pipeline_lint_tests}/pipeline_name_conventions.rst (100%) rename docs/api/_src/{lint_tests => pipeline_lint_tests}/pipeline_todos.rst (100%) rename docs/api/_src/{lint_tests => pipeline_lint_tests}/readme.rst (100%) rename docs/api/_src/{lint_tests => pipeline_lint_tests}/schema_lint.rst (100%) rename docs/api/_src/{lint_tests => pipeline_lint_tests}/schema_params.rst (100%) rename docs/api/_src/{lint_tests => pipeline_lint_tests}/template_strings.rst (100%) rename docs/api/_src/{lint_tests => pipeline_lint_tests}/version_consistency.rst (100%) diff --git a/docs/api/_src/conf.py b/docs/api/_src/conf.py index 25bcf1bf07..12dc7163cb 100644 --- a/docs/api/_src/conf.py +++ b/docs/api/_src/conf.py @@ -20,9 +20,9 @@ # -- Project information ----------------------------------------------------- -project = "nf-core tools API" -copyright = "2019, Phil Ewels, Sven Fillinger" -author = "Phil Ewels, Sven Fillinger" +project = "nf-core/tools" +copyright = "2019, nf-core community" +author = "Numerous nf-core contributors" # The short X.Y version version = nf_core.__version__ diff --git a/docs/api/_src/index.rst b/docs/api/_src/index.rst index 9236b45331..8561e2757e 100644 --- a/docs/api/_src/index.rst +++ b/docs/api/_src/index.rst @@ -1,10 +1,5 @@ -.. nf-core documentation master file, created by - sphinx-quickstart on Thu Jan 3 16:24:03 2019. - You can adapt this file completely to your liking, but it should at least - contain the root `toctree` directive. - -Welcome to nf-core tools API documentation! -=========================================== +nf-core/tools documentation +=========================== .. toctree:: :hidden: @@ -12,7 +7,7 @@ Welcome to nf-core tools API documentation! :caption: Contents: :glob: - lint_tests/index.rst + pipeline_lint_tests/index.rst api/index.rst This documentation is for the ``nf-core/tools`` package. 
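Each page pulled in by the ``:glob:`` pattern above is a one-directive stub that renders the docstring of a single lint test via Sphinx autodoc. A minimal sketch of how such stubs can be generated (using a hypothetical hard-coded list of test names here; the real `docs/api/make_lint_rst.py` derives the list from `nf_core.lint` instead):

```python
import os

# Hypothetical subset of lint test names, for illustration only --
# the real script derives the full list from nf_core.lint
lint_tests = ["files_exist", "files_unchanged", "nextflow_config", "schema_lint"]

docs_basedir = os.path.join("docs", "api", "_src", "pipeline_lint_tests")
os.makedirs(docs_basedir, exist_ok=True)

for test_name in lint_tests:
    # Each stub is a title plus a single autodoc directive, so the rendered
    # page shows the docstring of the corresponding PipelineLint method
    stub = "{name}\n{rule}\n\n.. automethod:: nf_core.lint.PipelineLint.{name}\n".format(
        name=test_name, rule="=" * len(test_name)
    )
    with open(os.path.join(docs_basedir, test_name + ".rst"), "w") as fh:
        fh.write(stub)
```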
diff --git a/docs/api/_src/lint_tests/actions_awsfulltest.rst b/docs/api/_src/pipeline_lint_tests/actions_awsfulltest.rst similarity index 100% rename from docs/api/_src/lint_tests/actions_awsfulltest.rst rename to docs/api/_src/pipeline_lint_tests/actions_awsfulltest.rst diff --git a/docs/api/_src/lint_tests/actions_awstest.rst b/docs/api/_src/pipeline_lint_tests/actions_awstest.rst similarity index 100% rename from docs/api/_src/lint_tests/actions_awstest.rst rename to docs/api/_src/pipeline_lint_tests/actions_awstest.rst diff --git a/docs/api/_src/lint_tests/actions_ci.rst b/docs/api/_src/pipeline_lint_tests/actions_ci.rst similarity index 100% rename from docs/api/_src/lint_tests/actions_ci.rst rename to docs/api/_src/pipeline_lint_tests/actions_ci.rst diff --git a/docs/api/_src/lint_tests/actions_schema_validation.rst b/docs/api/_src/pipeline_lint_tests/actions_schema_validation.rst similarity index 100% rename from docs/api/_src/lint_tests/actions_schema_validation.rst rename to docs/api/_src/pipeline_lint_tests/actions_schema_validation.rst diff --git a/docs/api/_src/lint_tests/conda_dockerfile.rst b/docs/api/_src/pipeline_lint_tests/conda_dockerfile.rst similarity index 100% rename from docs/api/_src/lint_tests/conda_dockerfile.rst rename to docs/api/_src/pipeline_lint_tests/conda_dockerfile.rst diff --git a/docs/api/_src/lint_tests/conda_env_yaml.rst b/docs/api/_src/pipeline_lint_tests/conda_env_yaml.rst similarity index 100% rename from docs/api/_src/lint_tests/conda_env_yaml.rst rename to docs/api/_src/pipeline_lint_tests/conda_env_yaml.rst diff --git a/docs/api/_src/lint_tests/files_exist.rst b/docs/api/_src/pipeline_lint_tests/files_exist.rst similarity index 98% rename from docs/api/_src/lint_tests/files_exist.rst rename to docs/api/_src/pipeline_lint_tests/files_exist.rst index 04b87f3277..5d4700af00 100644 --- a/docs/api/_src/lint_tests/files_exist.rst +++ b/docs/api/_src/pipeline_lint_tests/files_exist.rst @@ -2,3 +2,4 @@ files_exist =========== .. automethod:: nf_core.lint.PipelineLint.files_exist + diff --git a/docs/api/_src/lint_tests/files_unchanged.rst b/docs/api/_src/pipeline_lint_tests/files_unchanged.rst similarity index 100% rename from docs/api/_src/lint_tests/files_unchanged.rst rename to docs/api/_src/pipeline_lint_tests/files_unchanged.rst diff --git a/docs/api/_src/lint_tests/index.rst b/docs/api/_src/pipeline_lint_tests/index.rst similarity index 85% rename from docs/api/_src/lint_tests/index.rst rename to docs/api/_src/pipeline_lint_tests/index.rst index 641c85d9e7..6b6d2d9717 100644 --- a/docs/api/_src/lint_tests/index.rst +++ b/docs/api/_src/pipeline_lint_tests/index.rst @@ -1,4 +1,4 @@ -Lint tests +Pipeline lint tests ============================================ ..
toctree:: diff --git a/docs/api/_src/lint_tests/merge_markers.rst b/docs/api/_src/pipeline_lint_tests/merge_markers.rst similarity index 100% rename from docs/api/_src/lint_tests/merge_markers.rst rename to docs/api/_src/pipeline_lint_tests/merge_markers.rst diff --git a/docs/api/_src/lint_tests/nextflow_config.rst b/docs/api/_src/pipeline_lint_tests/nextflow_config.rst similarity index 100% rename from docs/api/_src/lint_tests/nextflow_config.rst rename to docs/api/_src/pipeline_lint_tests/nextflow_config.rst diff --git a/docs/api/_src/lint_tests/pipeline_name_conventions.rst b/docs/api/_src/pipeline_lint_tests/pipeline_name_conventions.rst similarity index 100% rename from docs/api/_src/lint_tests/pipeline_name_conventions.rst rename to docs/api/_src/pipeline_lint_tests/pipeline_name_conventions.rst diff --git a/docs/api/_src/lint_tests/pipeline_todos.rst b/docs/api/_src/pipeline_lint_tests/pipeline_todos.rst similarity index 100% rename from docs/api/_src/lint_tests/pipeline_todos.rst rename to docs/api/_src/pipeline_lint_tests/pipeline_todos.rst diff --git a/docs/api/_src/lint_tests/readme.rst b/docs/api/_src/pipeline_lint_tests/readme.rst similarity index 100% rename from docs/api/_src/lint_tests/readme.rst rename to docs/api/_src/pipeline_lint_tests/readme.rst diff --git a/docs/api/_src/lint_tests/schema_lint.rst b/docs/api/_src/pipeline_lint_tests/schema_lint.rst similarity index 100% rename from docs/api/_src/lint_tests/schema_lint.rst rename to docs/api/_src/pipeline_lint_tests/schema_lint.rst diff --git a/docs/api/_src/lint_tests/schema_params.rst b/docs/api/_src/pipeline_lint_tests/schema_params.rst similarity index 100% rename from docs/api/_src/lint_tests/schema_params.rst rename to docs/api/_src/pipeline_lint_tests/schema_params.rst diff --git a/docs/api/_src/lint_tests/template_strings.rst b/docs/api/_src/pipeline_lint_tests/template_strings.rst similarity index 100% rename from docs/api/_src/lint_tests/template_strings.rst rename to docs/api/_src/pipeline_lint_tests/template_strings.rst diff --git a/docs/api/_src/lint_tests/version_consistency.rst b/docs/api/_src/pipeline_lint_tests/version_consistency.rst similarity index 100% rename from docs/api/_src/lint_tests/version_consistency.rst rename to docs/api/_src/pipeline_lint_tests/version_consistency.rst diff --git a/docs/api/make_lint_rst.py b/docs/api/make_lint_rst.py index 48305a9f58..67d2bc5ef7 100644 --- a/docs/api/make_lint_rst.py +++ b/docs/api/make_lint_rst.py @@ -4,7 +4,7 @@ import os import nf_core.lint -docs_basedir = os.path.join(os.path.dirname(os.path.abspath(__file__)), "_src", "lint_tests") +docs_basedir = os.path.join(os.path.dirname(os.path.abspath(__file__)), "_src", "pipeline_lint_tests") # Get list of existing .rst files existing_docs = [] diff --git a/nf_core/lint/actions_schema_validation.py b/nf_core/lint/actions_schema_validation.py index a86d822a7e..2d2671933b 100644 --- a/nf_core/lint/actions_schema_validation.py +++ b/nf_core/lint/actions_schema_validation.py @@ -13,10 +13,10 @@ def actions_schema_validation(self): """Checks that the GitHub Action workflow yml/yaml files adhere to the correct schema nf-core pipelines use GitHub actions workflows to run CI tests, check formatting and also linting, among others. - These workflows are defined by ``yml``scripts in ``.github/workflows/``. This lint test verifies that these scripts are valid - by comparing them against the JSON schema for GitHub workflows + These workflows are defined by ``yml`` scripts in ``.github/workflows/``. 
This lint test verifies that these scripts are valid + by comparing them against the `JSON schema for GitHub workflows <https://json.schemastore.org/github-workflow>`_. - To pass this test, make sure that all your workflows contain the required properties ``on`` and ``jobs``and that + To pass this test, make sure that all your workflows contain the required properties ``on`` and ``jobs`` and that all other properties are of the correct type, as specified in the schema (link above). """ passed = [] diff --git a/nf_core/lint/files_exist.py b/nf_core/lint/files_exist.py index 4a387c0cce..3a2c142951 100644 --- a/nf_core/lint/files_exist.py +++ b/nf_core/lint/files_exist.py @@ -14,7 +14,9 @@ def files_exist(self): If these files are not found then this cannot be a Nextflow pipeline and something has gone badly wrong. All lint tests are stopped immediately with a critical error message. - Files that **must** be present:: + Files that *must* be present: + + .. code-block:: bash .gitattributes .github/.dockstore.yml @@ -48,26 +50,32 @@ def files_exist(self): nextflow.config README.md - Files that *should* be present:: + Files that *should* be present: + + .. code-block:: bash + + main.nf + environment.yml + Dockerfile + conf/base.config + .github/workflows/awstest.yml + .github/workflows/awsfulltest.yml + + Files that *must not* be present: - 'main.nf', - 'environment.yml', - 'Dockerfile', - 'conf/base.config', - '.github/workflows/awstest.yml', - '.github/workflows/awsfulltest.yml' + .. code-block:: bash - Files that *must not* be present:: + Singularity + parameters.settings.json + bin/markdown_to_html.r + conf/aws.config + .github/workflows/push_dockerhub.yml - Files that *should not* be present: - 'Singularity', - 'parameters.settings.json', - 'bin/markdown_to_html.r', - 'conf/aws.config', - '.github/workflows/push_dockerhub.yml' + Files that *should not* be present: - Files that *should not* be present:: + .. code-block:: bash - '.travis.yml' + .travis.yml """ passed = [] @@ -77,6 +85,7 @@ def files_exist(self): # NB: Should all be files, not directories # List of lists. Passes if any of the files in the sublist are found.
+ #: test autodoc short_name = self.nf_config["manifest.name"].strip("\"'").replace("nf-core/", "") files_fail = [ [".gitattributes"], From 587642d14ade99f3d69cee56244197ba22bbd057 Mon Sep 17 00:00:00 2001 From: Phil Ewels Date: Wed, 14 Apr 2021 01:24:14 +0200 Subject: [PATCH 082/210] No dangerous clean slate for docs ftp --- .github/workflows/tools-api-docs-dev.yml | 1 - .github/workflows/tools-api-docs-release.yml | 1 - 2 files changed, 2 deletions(-) diff --git a/.github/workflows/tools-api-docs-dev.yml b/.github/workflows/tools-api-docs-dev.yml index c99efc93f9..c8ccf211d6 100644 --- a/.github/workflows/tools-api-docs-dev.yml +++ b/.github/workflows/tools-api-docs-dev.yml @@ -37,4 +37,3 @@ jobs: server-dir: ${{ secrets.ftp_server_dir }}/dev/ protocol: ${{ secrets.ftp_protocol }} port: ${{ secrets.ftp_port }} - dangerous-clean-slate: true diff --git a/.github/workflows/tools-api-docs-release.yml b/.github/workflows/tools-api-docs-release.yml index f2eb7c12f7..3245b555e5 100644 --- a/.github/workflows/tools-api-docs-release.yml +++ b/.github/workflows/tools-api-docs-release.yml @@ -40,4 +40,3 @@ jobs: server-dir: ${{ secrets.ftp_server_dir }}/${{ matrix.dir }}/ protocol: ${{ secrets.ftp_protocol }} port: ${{ secrets.ftp_port }} - dangerous-clean-slate: true From 54eca9ad6d60a524302207f0cdfa33a8f04b73ea Mon Sep 17 00:00:00 2001 From: Phil Ewels Date: Wed, 14 Apr 2021 01:27:19 +0200 Subject: [PATCH 083/210] Fix pytests --- tests/test_lint.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/test_lint.py b/tests/test_lint.py index 633a294d87..edc124118a 100644 --- a/tests/test_lint.py +++ b/tests/test_lint.py @@ -154,7 +154,7 @@ def test_sphinx_rst_files(self): and that there are no unexpected files (eg. deleted lint tests)""" docs_basedir = os.path.join( - os.path.dirname(os.path.dirname(os.path.abspath(__file__))), "docs", "api", "_src", "lint_tests" + os.path.dirname(os.path.dirname(os.path.abspath(__file__))), "docs", "api", "_src", "pipeline_lint_tests" ) # Get list of existing .rst files From ef46447e030de30ae0d1d81848b374117294bb75 Mon Sep 17 00:00:00 2001 From: Erik Danielsson Date: Wed, 14 Apr 2021 09:57:39 +0200 Subject: [PATCH 084/210] Initialize config_profile_name in nextflow config --- nf_core/pipeline-template/nextflow.config | 1 + 1 file changed, 1 insertion(+) diff --git a/nf_core/pipeline-template/nextflow.config b/nf_core/pipeline-template/nextflow.config index 72949ee138..e8b125ab38 100644 --- a/nf_core/pipeline-template/nextflow.config +++ b/nf_core/pipeline-template/nextflow.config @@ -31,6 +31,7 @@ params { custom_config_version = 'master' custom_config_base = "https://raw.githubusercontent.com/nf-core/configs/${params.custom_config_version}" hostnames = false + config_profile_name = null config_profile_description = false config_profile_contact = false config_profile_url = false From 662ec8598cc96320f272bd246021fc8290977d90 Mon Sep 17 00:00:00 2001 From: kevinmenden Date: Wed, 14 Apr 2021 11:17:47 +0200 Subject: [PATCH 085/210] make cleanParameters more robust --- CHANGELOG.md | 1 + nf_core/pipeline-template/lib/NfcoreSchema.groovy | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 7c00bd6429..cdc2a9e794 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -15,6 +15,7 @@ * Added a timestamp to the trace + timeline + report + dag filenames to fix overwrite issue on AWS * Rewrite the `params_summary_log()` function to properly ignore unset params and have nicer formatting
[[#971](https://github.com/nf-core/tools/issues/971)] * Fix overly strict `--max_time` formatting regex in template schema [[#973](https://github.com/nf-core/tools/issues/973)] +* Convert `d` to `day` in the `cleanParameters` function to make Duration objects like `2d` pass the validation [[#858](https://github.com/nf-core/tools/issues/858)] ## [v1.13.3 - Copper Crocodile Resurrection :crocodile:](https://github.com/nf-core/tools/releases/tag/1.13.2) - [2021-03-24] diff --git a/nf_core/pipeline-template/lib/NfcoreSchema.groovy b/nf_core/pipeline-template/lib/NfcoreSchema.groovy index adfb2aec31..16137d763c 100644 --- a/nf_core/pipeline-template/lib/NfcoreSchema.groovy +++ b/nf_core/pipeline-template/lib/NfcoreSchema.groovy @@ -243,7 +243,7 @@ class NfcoreSchema { } // Cast Duration to String if (p['value'].getClass() == nextflow.util.Duration) { - new_params.replace(p.key, p['value'].toString()) + new_params.replace(p.key, p['value'].toString().replaceFirst(/d\s?$/, "day")) } // Cast LinkedHashMap to String if (p['value'].getClass() == LinkedHashMap) { From 8e91200b64a5628341be488475f87e2219fa3eda Mon Sep 17 00:00:00 2001 From: Phil Ewels Date: Wed, 14 Apr 2021 11:34:46 +0200 Subject: [PATCH 086/210] Docker is nfcore not nf-core --- .github/workflows/push_dockerhub_dev.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/push_dockerhub_dev.yml b/.github/workflows/push_dockerhub_dev.yml index b9080cc109..be2ad39945 100644 --- a/.github/workflows/push_dockerhub_dev.yml +++ b/.github/workflows/push_dockerhub_dev.yml @@ -23,10 +23,10 @@ jobs: uses: actions/checkout@v2 - name: Build new docker image - run: docker build --no-cache . -t nf-core/${{ matrix.image }}:dev -f ${{ matrix.image }}.Dockerfile + run: docker build --no-cache . -t nfcore/${{ matrix.image }}:dev -f ${{ matrix.image }}.Dockerfile - name: Push Docker image to DockerHub (dev) run: | echo "$DOCKERHUB_PASS" | docker login -u "$DOCKERHUB_USERNAME" --password-stdin - docker push nf-core/${{ matrix.image }}:dev + docker push nfcore/${{ matrix.image }}:dev From e5e3742becf5d0f0a5d0290fe44c911ef9a2e26b Mon Sep 17 00:00:00 2001 From: Phil Ewels Date: Wed, 14 Apr 2021 11:35:19 +0200 Subject: [PATCH 087/210] docker wf fix for release --- .github/workflows/push_dockerhub_release.yml | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/.github/workflows/push_dockerhub_release.yml b/.github/workflows/push_dockerhub_release.yml index abbb911adc..906dee7197 100644 --- a/.github/workflows/push_dockerhub_release.yml +++ b/.github/workflows/push_dockerhub_release.yml @@ -22,11 +22,11 @@ jobs: uses: actions/checkout@v2 - name: Build new docker image - run: docker build --no-cache . -t nf-core/${{ matrix.image }}:latest -f ${{ matrix.image }}.Dockerfile + run: docker build --no-cache . 
-t nfcore/${{ matrix.image }}:latest -f ${{ matrix.image }}.Dockerfile - name: Push Docker image to DockerHub (release) run: | echo "$DOCKERHUB_PASS" | docker login -u "$DOCKERHUB_USERNAME" --password-stdin - docker push nf-core/${{ matrix.image }}:latest - docker tag nf-core/${{ matrix.image }}:latest nf-core/${{ matrix.image }}:${{ github.event.release.tag_name }} - docker push nf-core/${{ matrix.image }}:${{ github.event.release.tag_name }} + docker push nfcore/${{ matrix.image }}:latest + docker tag nfcore/${{ matrix.image }}:latest nfcore/${{ matrix.image }}:${{ github.event.release.tag_name }} + docker push nfcore/${{ matrix.image }}:${{ github.event.release.tag_name }} From 18b6900f6f3daa915c40655946f23ccbdb314774 Mon Sep 17 00:00:00 2001 From: Phil Ewels Date: Wed, 14 Apr 2021 11:37:56 +0200 Subject: [PATCH 088/210] Update docs/api/_src/conf.py --- docs/api/_src/conf.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/api/_src/conf.py b/docs/api/_src/conf.py index 12dc7163cb..36807cf340 100644 --- a/docs/api/_src/conf.py +++ b/docs/api/_src/conf.py @@ -21,7 +21,7 @@ # -- Project information ----------------------------------------------------- project = "nf-core/tools" -copyright = "2019, nf-core community" +copyright = "2021, nf-core community" author = "Numerous nf-core contributors" # The short X.Y version From ce2a2073c7bf9399114c9af62469e7910b2d9d5f Mon Sep 17 00:00:00 2001 From: Phil Ewels Date: Wed, 14 Apr 2021 12:15:28 +0200 Subject: [PATCH 089/210] Write docs for running nf-core tools docker image --- README.md | 40 ++++++++++++++++++++++++++++++++++++++++ 1 file changed, 40 insertions(+) diff --git a/README.md b/README.md index 452b9abae8..befd6091d8 100644 --- a/README.md +++ b/README.md @@ -65,6 +65,46 @@ conda activate nf-core pip install nf-core ``` +### Docker image + +There is a docker image that you can use to run `nf-core/tools` that has all of the requirements packaged (including Nextflow) and so should work out of the box. It is called [`nfcore/tools`](https://hub.docker.com/r/nfcore/tools) _**(NB: no hyphen!)**_ + +You can use this container on the command line as follows: + +```bash +docker run -itv `pwd`:`pwd` -w `pwd` nfcore/tools +``` + +* `-i` and `-t` are needed for the interactive cli prompts to work (this tells Docker to use a pseudo-tty with stdin attached) +* The `-v` argument tells Docker to bind your current working directory (`pwd`) to the same path inside the container, so that files created there will be saved to your local file system outside of the container. +* `-w` sets the working directory in the container to this path, so that it's the same as your working directory outside of the container. + +After the above base command, you can use the regular command line flags that you would use with other types of installation. +For example, to launch the `viralrecon` pipeline: + +```bash +docker run -itv `pwd`:`pwd` -w `pwd` nfcore/tools launch viralrecon -r 1.1.0 +``` + +#### Docker bash alias + +The above base command is a little verbose, so if you are using this a lot it may be worth adding the following bash alias to your `~/.bashrc` file: + +```bash +alias nf-core="docker run -itv `pwd`:`pwd` -w `pwd` nfcore/tools" +``` + +Once applied (you may need to reload your shell if added to your `.bashrc`) you can just use `nf-core` instead: + +```bash +nf-core list +``` + +#### Docker versions + +You can use docker image tags to specify the version you would like to use.
For example, `nfcore/tools:dev` for the latest development version of the code, or `nfcore/tools:1.14` for version `1.14` of tools. +If you omit this, it will default to `:latest`, which should be the latest stable release. + ### Development version If you would like the latest development version of tools, the command is: From ed1b871a0a7365975f76fbf38b457d01894f3367 Mon Sep 17 00:00:00 2001 From: kevinmenden Date: Wed, 14 Apr 2021 12:15:44 +0200 Subject: [PATCH 090/210] use negative lookahead --- nf_core/pipeline-template/lib/NfcoreSchema.groovy | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nf_core/pipeline-template/lib/NfcoreSchema.groovy b/nf_core/pipeline-template/lib/NfcoreSchema.groovy index 16137d763c..07700e772f 100644 --- a/nf_core/pipeline-template/lib/NfcoreSchema.groovy +++ b/nf_core/pipeline-template/lib/NfcoreSchema.groovy @@ -243,7 +243,7 @@ class NfcoreSchema { } // Cast Duration to String if (p['value'].getClass() == nextflow.util.Duration) { - new_params.replace(p.key, p['value'].toString().replaceFirst(/d\s?$/, "day")) + new_params.replace(p.key, p['value'].toString().replaceFirst(/d(?!.)/, "day")) } // Cast LinkedHashMap to String if (p['value'].getClass() == LinkedHashMap) { From 147a6131ae78547e66b405f1b4c68e81aced485e Mon Sep 17 00:00:00 2001 From: Phil Ewels Date: Wed, 14 Apr 2021 12:17:51 +0200 Subject: [PATCH 091/210] Changelog --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 4170955cc9..9e18774a3e 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -10,6 +10,7 @@ * Extended `nf-core modules lint` functionality to check tags in `test.yml` and to look for a entry in the `pytest_software.yml` file * Update `modules` commands to use new test tag format `tool/subtool` * Rewrite how the tools documentation is deployed to the website, to allow multiple versions +* Created new Docker image for the tools cli package - see installation docs for details [[#917](https://github.com/nf-core/tools/issues/917)] ### Template From 50cbd384f32073b2a8bc0a2a9e1ce6b2f2b26ec2 Mon Sep 17 00:00:00 2001 From: kevinmenden Date: Wed, 14 Apr 2021 12:41:52 +0200 Subject: [PATCH 092/210] use \S instead of . 
--- nf_core/pipeline-template/lib/NfcoreSchema.groovy | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nf_core/pipeline-template/lib/NfcoreSchema.groovy b/nf_core/pipeline-template/lib/NfcoreSchema.groovy index 07700e772f..d591b434c3 100644 --- a/nf_core/pipeline-template/lib/NfcoreSchema.groovy +++ b/nf_core/pipeline-template/lib/NfcoreSchema.groovy @@ -243,7 +243,7 @@ class NfcoreSchema { } // Cast Duration to String if (p['value'].getClass() == nextflow.util.Duration) { - new_params.replace(p.key, p['value'].toString().replaceFirst(/d(?!.)/, "day")) + new_params.replace(p.key, p['value'].toString().replaceFirst(/d(?!\S)/, "day")) } // Cast LinkedHashMap to String if (p['value'].getClass() == LinkedHashMap) { From ca7285107d20ec038b21449c2d14057b7c111bc0 Mon Sep 17 00:00:00 2001 From: Phil Ewels Date: Wed, 14 Apr 2021 13:47:27 +0200 Subject: [PATCH 093/210] Docker docs - mention NXF_SINGULARITY_CACHE --- README.md | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/README.md b/README.md index befd6091d8..6ed20319ed 100644 --- a/README.md +++ b/README.md @@ -86,6 +86,12 @@ For example, to launch the `viralrecon` pipeline: docker run -itv `pwd`:`pwd` -w `pwd` nfcore/tools launch viralrecon -r 1.1.0 ``` +If you use `$NXF_SINGULARITY_CACHE` for downloads, you'll also need to make this folder and environment variable available to the container: + +```bash +docker run -itv `pwd`:`pwd` -v $NXF_SINGULARITY_CACHE:$NXF_SINGULARITY_CACHE -e NXF_SINGULARITY_CACHE -w `pwd` nfcore/tools launch viralrecon -r 1.1.0 +``` + From bf83b05745d094ac922499f15b29374c9ff2f16c Mon Sep 17 00:00:00 2001 From: Maxime Garcia Date: Wed, 14 Apr 2021 14:37:04 +0200 Subject: [PATCH 094/210] Fix typo NXF_SINGULARITY_CACHE -> NXF_SINGULARITY_CACHEDIR --- README.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index 6ed20319ed..3713927ef6 100644 --- a/README.md +++ b/README.md @@ -86,10 +86,10 @@ For example, to launch the `viralrecon` pipeline: docker run -itv `pwd`:`pwd` -w `pwd` nfcore/tools launch viralrecon -r 1.1.0 ``` -If you use `$NXF_SINGULARITY_CACHE` for downloads, you'll also need to make this folder and environment variable available to the container: +If you use `$NXF_SINGULARITY_CACHEDIR` for downloads, you'll also need to make this folder and environment variable available to the container: ```bash -docker run -itv `pwd`:`pwd` -v $NXF_SINGULARITY_CACHE:$NXF_SINGULARITY_CACHE -e NXF_SINGULARITY_CACHE -w `pwd` nfcore/tools launch viralrecon -r 1.1.0 +docker run -itv `pwd`:`pwd` -v $NXF_SINGULARITY_CACHEDIR:$NXF_SINGULARITY_CACHEDIR -e NXF_SINGULARITY_CACHEDIR -w `pwd` nfcore/tools launch viralrecon -r 1.1.0 ``` From 4395bc0097ef19083ca638cca39e30c987c476c4 Mon Sep 17 00:00:00 2001 From: Phil Ewels Date: Wed, 14 Apr 2021 19:49:57 +0200 Subject: [PATCH 095/210] Docker - add `-u $(id -u):$(id -g)` to the docs --- README.md | 14 ++++++++------ 1 file changed, 8 insertions(+), 6 deletions(-) diff --git a/README.md b/README.md index 3713927ef6..19189d3177 100644 --- a/README.md +++ b/README.md @@ -72,35 +72,37 @@ There is a docker image that you can use to run `nf-core/tools` that has all of You can use this container on the command line as follows: ```bash -docker run -itv `pwd`:`pwd` -w `pwd` nfcore/tools +docker run -itv `pwd`:`pwd` -w `pwd` -u $(id -u):$(id
-g) nfcore/tools ``` * `-i` and `-t` are needed for the interactive cli prompts to work (this tells Docker to use a pseudo-tty with stdin attached) * The `-v` argument tells Docker to bind your current working directory (`pwd`) to the same path inside the container, so that files created there will be saved to your local file system outside of the container. * `-w` sets the working directory in the container to this path, so that it's the same as your working directory outside of the container. +* `-u` sets your local user account as the user inside the container, so that any files created have the correct ownership permissions After the above base command, you can use the regular command line flags that you would use with other types of installation. For example, to launch the `viralrecon` pipeline: ```bash -docker run -itv `pwd`:`pwd` -w `pwd` nfcore/tools launch viralrecon -r 1.1.0 +docker run -itv `pwd`:`pwd` -w `pwd` -u $(id -u):$(id -g) nfcore/tools launch viralrecon -r 1.1.0 ``` If you use `$NXF_SINGULARITY_CACHEDIR` for downloads, you'll also need to make this folder and environment variable available to the container: ```bash -docker run -itv `pwd`:`pwd` -v $NXF_SINGULARITY_CACHEDIR:$NXF_SINGULARITY_CACHEDIR -e NXF_SINGULARITY_CACHEDIR -w `pwd` nfcore/tools launch viralrecon -r 1.1.0 +docker run -itv `pwd`:`pwd` -w `pwd` -u $(id -u):$(id -g) -v $NXF_SINGULARITY_CACHEDIR:$NXF_SINGULARITY_CACHEDIR -e NXF_SINGULARITY_CACHEDIR nfcore/tools launch viralrecon -r 1.1.0 ``` #### Docker bash alias -The above base command is a little verbose, so if you are using this a lot it may be worth adding the following bash alias to your `~/.bashrc` file: +The above base command is a bit of a mouthful to type, to say the least. +To make it easier to use, we highly recommend adding the following bash alias to your `~/.bashrc` file: ```bash -alias nf-core="docker run -itv `pwd`:`pwd` -w `pwd` nfcore/tools" +alias nf-core="docker run -itv `pwd`:`pwd` -w `pwd` -u $(id -u):$(id -g) nfcore/tools" ``` -Once applied (you may need to reload your shell if added to your `.bashrc`) you can just use `nf-core` instead: +Once applied (you may need to reload your shell) you can just use the `nf-core` command instead: ```bash nf-core list From b941890eb8b521af84fc3e59e0c31cf12d40be2e Mon Sep 17 00:00:00 2001 From: Phil Ewels Date: Wed, 14 Apr 2021 22:28:28 +0200 Subject: [PATCH 096/210] Fix lint config for test 'nextflow_config' --- nf_core/lint/nextflow_config.py | 27 ++++++++++++++++++++++----- 1 file changed, 22 insertions(+), 5 deletions(-) diff --git a/nf_core/lint/nextflow_config.py b/nf_core/lint/nextflow_config.py index 19f10f045c..58c424ccbd 100644 --- a/nf_core/lint/nextflow_config.py +++ b/nf_core/lint/nextflow_config.py @@ -77,6 +77,22 @@ def nextflow_config(self): **The following Nextflow syntax is deprecated and fails the test if present:** * Process-level configuration syntax still using the old Nextflow syntax, for example: ``process.$fastqc`` instead of ``process withName:'fastqc'``. + + .. tip:: You can choose to ignore tests for the presence or absence of specific config variables + by creating a file called ``.nf-core-lint.yml`` in the root of your pipeline and creating + a list of the config variables that should be ignored. For example: + + .. code-block:: yaml + + nextflow_config: + - params.input + + The other checks in this test (deprecated syntax etc) cannot be individually identified, + but you can skip the entire test block if you wish: + + ..
code-block:: yaml + + nextflow_config: False """ passed = [] warned = [] @@ -125,7 +141,8 @@ def nextflow_config(self): for cfs in config_fail: for cf in cfs: if cf in ignore_configs: - continue + ignored.append("Config variable ignored: {}".format(self._wrap_quotes(cf))) + break if cf in self.nf_config.keys(): passed.append("Config variable found: {}".format(self._wrap_quotes(cf))) break @@ -134,7 +151,8 @@ def nextflow_config(self): for cfs in config_warn: for cf in cfs: if cf in ignore_configs: - continue + ignored.append("Config variable ignored: {}".format(self._wrap_quotes(cf))) + break if cf in self.nf_config.keys(): passed.append("Config variable found: {}".format(self._wrap_quotes(cf))) break @@ -142,7 +160,8 @@ def nextflow_config(self): warned.append("Config variable not found: {}".format(self._wrap_quotes(cfs))) for cf in config_fail_ifdefined: if cf in ignore_configs: - continue + ignored.append("Config variable ignored: {}".format(self._wrap_quotes(cf))) + break if cf not in self.nf_config.keys(): passed.append("Config variable (correctly) not found: {}".format(self._wrap_quotes(cf))) else: @@ -262,6 +281,4 @@ def nextflow_config(self): ) ) - for config in ignore_configs: - ignored.append("Config ignored: {}".format(self._wrap_quotes(config))) return {"passed": passed, "warned": warned, "failed": failed, "ignored": ignored} From b332bf492c6b93ebb620f9f950e39737c089807a Mon Sep 17 00:00:00 2001 From: Phil Ewels Date: Wed, 14 Apr 2021 22:38:31 +0200 Subject: [PATCH 097/210] Changelog --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 2abfff1858..98d64ec86c 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -11,6 +11,7 @@ * Update `modules` commands to use new test tag format `tool/subtool` * Rewrite how the tools documentation is deployed to the website, to allow multiple versions * Created new Docker image for the tools cli package - see installation docs for details [[#917](https://github.com/nf-core/tools/issues/917)] +* Fix bug in nf-core lint config skipping for the `nextflow_config` test [[#1019](https://github.com/nf-core/tools/issues/1019)] ### Template From 4edee43a50a0ccf65d562132aae50d4705977884 Mon Sep 17 00:00:00 2001 From: Phil Ewels Date: Wed, 14 Apr 2021 22:55:24 +0200 Subject: [PATCH 098/210] nf-core lint - new -k option to run only given lint tests --- CHANGELOG.md | 1 + README.md | 2 ++ nf_core/__main__.py | 5 +++-- nf_core/lint/__init__.py | 22 +++++++++++++++++++--- 4 files changed, 25 insertions(+), 5 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 2abfff1858..4c36ca5b8d 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -11,6 +11,7 @@ * Update `modules` commands to use new test tag format `tool/subtool` * Rewrite how the tools documentation is deployed to the website, to allow multiple versions * Created new Docker image for the tools cli package - see installation docs for details [[#917](https://github.com/nf-core/tools/issues/917)] +* New `-k`/`--key` cli option for `nf-core lint` to allow you to run only named lint tests, for faster local debugging ### Template diff --git a/README.md b/README.md index 19189d3177..59b9278d89 100644 --- a/README.md +++ b/README.md @@ -602,6 +602,8 @@ Tip: Some of these linting errors can automatically be resolved with the followi nf-core lint . --fix conda_env_yaml ``` +You can use the `-k` / `--key` flag to run only named tests for faster debugging, eg: `nf-core lint . 
--key files_unchanged` + ### Linting documentation Each test result name on the left is a terminal hyperlink. diff --git a/nf_core/__main__.py b/nf_core/__main__.py index 9ce7cfcfdd..79b03950ba 100755 --- a/nf_core/__main__.py +++ b/nf_core/__main__.py @@ -305,11 +305,12 @@ def create(name, description, author, version, no_git, force, outdir): @click.option( "-f", "--fix", type=str, metavar="", multiple=True, help="Attempt to automatically fix specified lint test" ) +@click.option("-k", "--key", type=str, metavar="", multiple=True, help="Run only these lint tests") @click.option("-p", "--show-passed", is_flag=True, help="Show passing tests on the command line") @click.option("-i", "--fail-ignored", is_flag=True, help="Convert ignored tests to failures") @click.option("--markdown", type=str, metavar="", help="File to write linting results to (Markdown)") @click.option("--json", type=str, metavar="", help="File to write linting results to (JSON)") -def lint(pipeline_dir, release, fix, show_passed, fail_ignored, markdown, json): +def lint(pipeline_dir, release, fix, key, show_passed, fail_ignored, markdown, json): """ Check pipeline code against nf-core guidelines. @@ -323,7 +324,7 @@ def lint(pipeline_dir, release, fix, show_passed, fail_ignored, markdown, json): # Run the lint tests! try: - lint_obj = nf_core.lint.run_linting(pipeline_dir, release, fix, show_passed, fail_ignored, markdown, json) + lint_obj = nf_core.lint.run_linting(pipeline_dir, release, fix, key, show_passed, fail_ignored, markdown, json) if len(lint_obj.failed) > 0: sys.exit(1) except AssertionError as e: diff --git a/nf_core/lint/__init__.py b/nf_core/lint/__init__.py index 548d1e8d87..c43992c80d 100644 --- a/nf_core/lint/__init__.py +++ b/nf_core/lint/__init__.py @@ -24,7 +24,7 @@ def run_linting( - pipeline_dir, release_mode=False, fix=(), show_passed=False, fail_ignored=False, md_fn=None, json_fn=None + pipeline_dir, release_mode=False, fix=(), key=(), show_passed=False, fail_ignored=False, md_fn=None, json_fn=None ): """Runs all nf-core linting checks on a given Nextflow pipeline project in either `release` mode or `normal` mode (default). 
Returns an object @@ -40,7 +40,7 @@ def run_linting( """ # Create the lint object - lint_obj = PipelineLint(pipeline_dir, release_mode, fix, fail_ignored) + lint_obj = PipelineLint(pipeline_dir, release_mode, fix, key, fail_ignored) # Load the various pipeline configs lint_obj._load_lint_config() @@ -115,7 +115,7 @@ class PipelineLint(nf_core.utils.Pipeline): from .actions_schema_validation import actions_schema_validation from .merge_markers import merge_markers - def __init__(self, wf_path, release_mode=False, fix=(), fail_ignored=False): + def __init__(self, wf_path, release_mode=False, fix=(), key=(), fail_ignored=False): """ Initialise linting object """ # Initialise the parent object @@ -151,6 +151,7 @@ def __init__(self, wf_path, release_mode=False, fix=(), fail_ignored=False): if self.release_mode: self.lint_tests.extend(["version_consistency"]) self.fix = fix + self.key = key self.progress_bar = None def _load(self): @@ -208,6 +209,21 @@ def _lint_pipeline(self): ) ) + # Check that supplied test keys exist + bad_keys = [k for k in self.key if k not in self.lint_tests] + if len(bad_keys) > 0: + raise AssertionError( + "Test name{} not recognised: '{}'".format( + "s" if len(bad_keys) > 1 else "", + "', '".join(bad_keys), + ) + ) + + # If -k supplied, only run these tests + if self.key: + log.info("Only running tests: '{}'".format("', '".join(self.key))) + self.lint_tests = [k for k in self.lint_tests if k in self.key] + # Check that the pipeline_dir is a clean git repo if len(self.fix): log.info("Attempting to automatically fix failing tests") From f8937b239237f8917143d7b02579c64ee6befb15 Mon Sep 17 00:00:00 2001 From: Phil Ewels Date: Wed, 14 Apr 2021 22:58:03 +0200 Subject: [PATCH 099/210] Update readme example to show -k multiple times --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 59b9278d89..67f55fb6e0 100644 --- a/README.md +++ b/README.md @@ -602,7 +602,7 @@ Tip: Some of these linting errors can automatically be resolved with the followi nf-core lint . --fix conda_env_yaml ``` -You can use the `-k` / `--key` flag to run only named tests for faster debugging, eg: `nf-core lint . --key files_unchanged` +You can use the `-k` / `--key` flag to run only named tests for faster debugging, eg: `nf-core lint . 
-k files_exist -k files_unchanged` ### Linting documentation From 08de7b4f5165f398bbab14efc5f8a9178c77cffa Mon Sep 17 00:00:00 2001 From: Pontus Freyhult Date: Thu, 15 Apr 2021 09:42:24 +0200 Subject: [PATCH 100/210] Ignore any permission errors while creating/checking requests cache directories --- CHANGELOG.md | 1 + nf_core/utils.py | 18 +++++++++++------- tests/test_utils.py | 8 ++++++++ 3 files changed, 20 insertions(+), 7 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 2abfff1858..01124bd5dc 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -11,6 +11,7 @@ * Update `modules` commands to use new test tag format `tool/subtool` * Rewrite how the tools documentation is deployed to the website, to allow multiple versions * Created new Docker image for the tools cli package - see installation docs for details [[#917](https://github.com/nf-core/tools/issues/917)] +* Ignore permission errors for setting up requests cache directories to allow starting with an invalid or read-only HOME directory ### Template diff --git a/nf_core/utils.py b/nf_core/utils.py index 5a60e579da..18f2dcb581 100644 --- a/nf_core/utils.py +++ b/nf_core/utils.py @@ -283,13 +283,17 @@ def setup_requests_cachedir(): """ pyversion = ".".join(str(v) for v in sys.version_info[0:3]) cachedir = os.path.join(os.getenv("HOME"), os.path.join(".nfcore", "cache_" + pyversion)) - if not os.path.exists(cachedir): - os.makedirs(cachedir) - requests_cache.install_cache( - os.path.join(cachedir, "github_info"), - expire_after=datetime.timedelta(hours=1), - backend="sqlite", - ) + + try: + if not os.path.exists(cachedir): + os.makedirs(cachedir) + requests_cache.install_cache( + os.path.join(cachedir, "github_info"), + expire_after=datetime.timedelta(hours=1), + backend="sqlite", + ) + except PermissionError: + pass def wait_cli_function(poll_func, poll_every=20): diff --git a/tests/test_utils.py b/tests/test_utils.py index 962319e2a9..542a28ee28 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -96,6 +96,14 @@ def test_list_files_no_git(self): pipeline_obj._list_files() assert tmp_fn in pipeline_obj.files + @mock.patch("os.path.exists") + @mock.patch("os.makedirs") + def test_request_cant_create_cache(self, mock_mkd, mock_exists): + """Test that we don't get an error when we can't create cachedirs""" + mock_mkd.side_effect = PermissionError() + mock_exists.return_value = False + nf_core.utils.setup_requests_cachedir() + def test_pip_package_pass(self): result = nf_core.utils.pip_package("multiqc=1.10") assert type(result) == dict From 442e911ba5ba4cdcbc26881cf6157ad3452e40f6 Mon Sep 17 00:00:00 2001 From: Phil Ewels Date: Thu, 15 Apr 2021 10:14:18 +0200 Subject: [PATCH 101/210] Fix mismatch === chars and colours in checkHostname() function --- nf_core/pipeline-template/main.nf | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/nf_core/pipeline-template/main.nf b/nf_core/pipeline-template/main.nf index 4ddf49f734..ef3abaaac2 100644 --- a/nf_core/pipeline-template/main.nf +++ b/nf_core/pipeline-template/main.nf @@ -378,11 +378,11 @@ def checkHostname() { params.hostnames.each { prof, hnames -> hnames.each { hname -> if (hostname.contains(hname) && !workflow.profile.contains(prof)) { - log.error '====================================================\n' + + log.error "${c_red}====================================================${c_reset}\n" + " ${c_red}WARNING!${c_reset} You are running with `-profile $workflow.profile`\n" + " but your machine hostname is ${c_white}'$hostname'${c_reset}\n" + " 
${c_yellow_bold}It's highly recommended that you use `-profile $prof${c_reset}`\n" + - '============================================================' + "${c_red}====================================================${c_reset}\n" } } } From f18208898df8204482406b9f915e540fd9b6a23f Mon Sep 17 00:00:00 2001 From: Pontus Freyhult Date: Thu, 15 Apr 2021 10:31:13 +0200 Subject: [PATCH 102/210] Fix nextflow permissions for non-root users in docker --- tools.Dockerfile | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/tools.Dockerfile b/tools.Dockerfile index ac918c37c5..37a8013dd0 100644 --- a/tools.Dockerfile +++ b/tools.Dockerfile @@ -23,7 +23,8 @@ RUN mkdir -p /usr/share/man/man1 \ # Install Nextflow RUN curl -s https://get.nextflow.io | bash \ - && mv nextflow /usr/local/bin + && mv nextflow /usr/local/bin \ + && chmod a+rx /usr/local/bin/nextflow # Add the nf-core source files to the image COPY . /usr/src/nf_core WORKDIR /usr/src/nf_core From cb06dc9379bfbe9e628407ee4f51d32a78f963ec Mon Sep 17 00:00:00 2001 From: Erik Danielsson Date: Thu, 15 Apr 2021 12:12:05 +0200 Subject: [PATCH 103/210] Added questionary select list for releases --- nf_core/__main__.py | 4 ++-- nf_core/download.py | 25 +++++++++++++++++++++++-- 2 files changed, 25 insertions(+), 4 deletions(-) diff --git a/nf_core/__main__.py b/nf_core/__main__.py index 9ce7cfcfdd..5f0c2007be 100755 --- a/nf_core/__main__.py +++ b/nf_core/__main__.py @@ -201,8 +201,8 @@ def launch(pipeline, id, revision, command_only, params_in, params_out, save_all # nf-core download @nf_core_cli.command(help_priority=3) -@click.argument("pipeline", metavar="") -@click.option("-r", "--release", type=str, help="Pipeline release") +@click.argument("pipeline", required=False, metavar="") +@click.option("-r", "--release", is_flag=True, help="Pipeline release") @click.option("-o", "--outdir", type=str, help="Output directory") @click.option( "-c", diff --git a/nf_core/download.py b/nf_core/download.py index a5e0a88e6e..d9638e0277 100644 --- a/nf_core/download.py +++ b/nf_core/download.py @@ -76,7 +76,7 @@ class DownloadWorkflow(object): def __init__( self, pipeline=None, - release=None, + release_flag=False, outdir=None, compress_type="tar.gz", force=False, @@ -85,7 +85,8 @@ def __init__( parallel_downloads=4, ): self.pipeline = pipeline - self.release = release + self.release_flag = release_flag + self.release = None self.outdir = outdir self.output_filename = None self.compress_type = compress_type @@ -121,6 +122,11 @@ def download_workflow(self): style=nf_core.utils.nfcore_question_style, ).ask() + # Prompts user for release tag if '-r' was set + if self.release_flag: + release_tags = self.fetch_release_tags() + self.release = questionary.select("Select release:", release_tags).ask() + # Get workflow details try: self.fetch_workflow_details(wfs) @@ -179,6 +185,21 @@ def download_workflow(self): log.info("Compressing download..") self.compress_download() + def fetch_release_tags(self): + # Fetch releases from github api + releases_url = "https://api.github.com/repos/nf-core/{}/releases".format(self.pipeline) + response = requests.get(releases_url) + + # Filter out the release tags and sort them + release_tags = map(lambda release: release.get("tag_name", None), response.json()) + release_tags = filter(lambda tag: tag != None, release_tags) + release_tags = list(release_tags) + if len(release_tags) == 0: + log.error("Unable to find any releases!") + sys.exit(1) + release_tags = sorted(release_tags, key=lambda tag: tuple(tag.split(".")), 
reverse=True) + return release_tags + def fetch_workflow_details(self, wfs): """Fetches details of a nf-core workflow to download. From 519dd18d1f95c813c8ba636cc96292845d552b2d Mon Sep 17 00:00:00 2001 From: Erik Danielsson Date: Thu, 15 Apr 2021 14:14:46 +0200 Subject: [PATCH 104/210] Added confirmation prompt for image download and some docs --- nf_core/__main__.py | 25 +++++++++++++++++++++---- nf_core/download.py | 29 ++++++++++++++++++++--------- 2 files changed, 41 insertions(+), 13 deletions(-) diff --git a/nf_core/__main__.py b/nf_core/__main__.py index 5f0c2007be..d17c0eec04 100755 --- a/nf_core/__main__.py +++ b/nf_core/__main__.py @@ -3,6 +3,7 @@ from click.types import File from rich import print +from rich.prompt import Confirm import click import logging import os @@ -200,6 +201,15 @@ def launch(pipeline, id, revision, command_only, params_in, params_out, save_all # nf-core download +def confirm_container_download(ctx, opts, value): + """Confirm choice of container""" + if value != "none": + is_satisfied = Confirm.ask(f"Should {value} image be downloaded?") + if not is_satisfied: + value = 'none' + return value + + @nf_core_cli.command(help_priority=3) @click.argument("pipeline", required=False, metavar="") @click.option("-r", "--release", is_flag=True, help="Pipeline release") @@ -212,16 +222,23 @@ def launch(pipeline, id, revision, command_only, params_in, params_out, save_all help="Archive compression type", ) @click.option("-f", "--force", is_flag=True, default=False, help="Overwrite existing files") -@click.option("-s", "--singularity", is_flag=True, default=False, help="Download singularity images") @click.option( - "-c", + "-C", + "--container", + type=click.Choice(['none', 'singularity']), + default="none", + callback=confirm_container_download, + help="Download images", +) +@click.option( + "-s", "--singularity-cache", is_flag=True, default=False, help="Don't copy images to the output directory, don't set 'singularity.cacheDir' in workflow", ) @click.option("-p", "--parallel-downloads", type=int, default=4, help="Number of parallel image downloads") -def download(pipeline, release, outdir, compress, force, singularity, singularity_cache, parallel_downloads): +def download(pipeline, release, outdir, compress, force, container, singularity_cache, parallel_downloads): """ Download a pipeline, nf-core/configs and pipeline singularity images. @@ -229,7 +246,7 @@ def download(pipeline, release, outdir, compress, force, singularity, singularit workflow to use relative paths to the configs and singularity images. 
""" dl = nf_core.download.DownloadWorkflow( - pipeline, release, outdir, compress, force, singularity, singularity_cache, parallel_downloads + pipeline, release, outdir, compress, force, container, singularity_cache, parallel_downloads ) dl.download_workflow() diff --git a/nf_core/download.py b/nf_core/download.py index d9638e0277..b0210ded3f 100644 --- a/nf_core/download.py +++ b/nf_core/download.py @@ -80,7 +80,7 @@ def __init__( outdir=None, compress_type="tar.gz", force=False, - singularity=False, + container='none', singularity_cache_only=False, parallel_downloads=4, ): @@ -93,13 +93,13 @@ def __init__( if self.compress_type == "none": self.compress_type = None self.force = force - self.singularity = singularity + self.singularity = container == "singularity" self.singularity_cache_only = singularity_cache_only self.parallel_downloads = parallel_downloads # Sanity checks if self.singularity_cache_only and not self.singularity: - log.error("Command has '--singularity-cache' set, but not '--singularity'") + log.error("Command has '--singularity-cache' set, but '--container' is 'none'") sys.exit(1) self.wf_name = None @@ -110,11 +110,11 @@ def __init__( def download_workflow(self): """Starts a nf-core workflow download.""" - # Fetches remote workflows + # Fetch remote workflows wfs = nf_core.list.Workflows() wfs.get_remote_workflows() - # Prompts user if pipeline name was not specified + # Prompt user if pipeline name was not specified if self.pipeline is None: self.pipeline = questionary.autocomplete( "Pipeline name:", @@ -122,10 +122,13 @@ def download_workflow(self): style=nf_core.utils.nfcore_question_style, ).ask() - # Prompts user for release tag if '-r' was set + # Prompt user for release tag if '--release' was set if self.release_flag: - release_tags = self.fetch_release_tags() - self.release = questionary.select("Select release:", release_tags).ask() + try: + release_tags = self.fetch_release_tags() + except LookupError: + sys.exit(1) + self.release = questionary.select("Select release:", choices=release_tags).ask() # Get workflow details try: @@ -186,6 +189,14 @@ def download_workflow(self): self.compress_download() def fetch_release_tags(self): + """Fetches tag names of pipeline releases from github + + Returns: + release_tags (list[str]): Returns list of release tags + + Raises: + LookupError, if no releases were found + """ # Fetch releases from github api releases_url = "https://api.github.com/repos/nf-core/{}/releases".format(self.pipeline) response = requests.get(releases_url) @@ -196,7 +207,7 @@ def fetch_release_tags(self): release_tags = list(release_tags) if len(release_tags) == 0: log.error("Unable to find any releases!") - sys.exit(1) + raise LookupError release_tags = sorted(release_tags, key=lambda tag: tuple(tag.split(".")), reverse=True) return release_tags From 77808a45ab2311de9af646b4f83bc23f8c9b316e Mon Sep 17 00:00:00 2001 From: Erik Danielsson Date: Thu, 15 Apr 2021 14:34:02 +0200 Subject: [PATCH 105/210] Added prompt for singularity caching (and 'negated' prompt for image download) --- nf_core/__main__.py | 18 +++++++++++++----- 1 file changed, 13 insertions(+), 5 deletions(-) diff --git a/nf_core/__main__.py b/nf_core/__main__.py index d17c0eec04..0478abcbec 100755 --- a/nf_core/__main__.py +++ b/nf_core/__main__.py @@ -203,12 +203,19 @@ def launch(pipeline, id, revision, command_only, params_in, params_out, save_all # nf-core download def confirm_container_download(ctx, opts, value): """Confirm choice of container""" - if value != "none": - is_satisfied 
= Confirm.ask(f"Should {value} image be downloaded?") - if not is_satisfied: - value = 'none' + if value == None: + should_download = Confirm.ask(f"Should singularity image be downloaded?") + if should_download: + value = "singularity" + else: + value = "none" return value +def confirm_singularity_cache(ctx, opts, value): + """Confirm that singularity image should be cached""" + if not value: + return Confirm.ask(f"Should singularity image be cached?") + return value @nf_core_cli.command(help_priority=3) @click.argument("pipeline", required=False, metavar="") @@ -226,7 +233,7 @@ def confirm_container_download(ctx, opts, value): "-C", "--container", type=click.Choice(['none', 'singularity']), - default="none", + default=None, callback=confirm_container_download, help="Download images", ) @@ -234,6 +241,7 @@ def confirm_container_download(ctx, opts, value): "-s", "--singularity-cache", is_flag=True, + callback=confirm_singularity_cache, default=False, help="Don't copy images to the output directory, don't set 'singularity.cacheDir' in workflow", ) From f53b942289dc5a7f1105e7c944b10433b557a8bb Mon Sep 17 00:00:00 2001 From: Erik Danielsson Date: Thu, 15 Apr 2021 14:44:53 +0200 Subject: [PATCH 106/210] Added prompt for compression type --- nf_core/__main__.py | 3 +-- nf_core/download.py | 16 ++++++++++++---- 2 files changed, 13 insertions(+), 6 deletions(-) diff --git a/nf_core/__main__.py b/nf_core/__main__.py index 0478abcbec..fc01d25f07 100755 --- a/nf_core/__main__.py +++ b/nf_core/__main__.py @@ -224,8 +224,7 @@ def confirm_singularity_cache(ctx, opts, value): @click.option( "-c", "--compress", - type=click.Choice(["tar.gz", "tar.bz2", "zip", "none"]), - default="tar.gz", + is_flag=True, help="Archive compression type", ) @click.option("-f", "--force", is_flag=True, default=False, help="Overwrite existing files") diff --git a/nf_core/download.py b/nf_core/download.py index b0210ded3f..470f6b8a9b 100644 --- a/nf_core/download.py +++ b/nf_core/download.py @@ -78,7 +78,7 @@ def __init__( pipeline=None, release_flag=False, outdir=None, - compress_type="tar.gz", + compress=False, force=False, container='none', singularity_cache_only=False, @@ -89,9 +89,8 @@ def __init__( self.release = None self.outdir = outdir self.output_filename = None - self.compress_type = compress_type - if self.compress_type == "none": - self.compress_type = None + self.compress = compress + self.compress_type = None self.force = force self.singularity = container == "singularity" self.singularity_cache_only = singularity_cache_only @@ -183,6 +182,15 @@ def download_workflow(self): self.find_container_images() self.get_singularity_images() + # If '--compress' flag was set, ask user what compression type to be used + if self.compress: + self.compress_type = questionary.select( + "Choose compression type:", + choices=["none", "tar.gz", "tar.bz2", "zip",] + ).ask() + if self.compress_type == "none": + self.compress_type = None + # Compress into an archive if self.compress_type is not None: log.info("Compressing download..") From 958e95bc055f779175f36139d3971dcdcc084873 Mon Sep 17 00:00:00 2001 From: Erik Danielsson Date: Thu, 15 Apr 2021 16:03:02 +0200 Subject: [PATCH 107/210] Added checking for 'export NXF_SINGULARITY_CACHEDIR' in bashrc --- nf_core/__main__.py | 4 +++- nf_core/download.py | 24 ++++++++++++++++++++---- 2 files changed, 23 insertions(+), 5 deletions(-) diff --git a/nf_core/__main__.py b/nf_core/__main__.py index fc01d25f07..2452717e74 100755 --- a/nf_core/__main__.py +++ b/nf_core/__main__.py @@ -211,12 
From f53b942289dc5a7f1105e7c944b10433b557a8bb Mon Sep 17 00:00:00 2001
From: Erik Danielsson
Date: Thu, 15 Apr 2021 14:44:53 +0200
Subject: [PATCH 106/210] Added prompt for compression type

---
 nf_core/__main__.py |  3 +--
 nf_core/download.py | 16 ++++++++++++----
 2 files changed, 13 insertions(+), 6 deletions(-)

diff --git a/nf_core/__main__.py b/nf_core/__main__.py
index 0478abcbec..fc01d25f07 100755
--- a/nf_core/__main__.py
+++ b/nf_core/__main__.py
@@ -224,8 +224,7 @@ def confirm_singularity_cache(ctx, opts, value):
 @click.option(
     "-c",
     "--compress",
-    type=click.Choice(["tar.gz", "tar.bz2", "zip", "none"]),
-    default="tar.gz",
+    is_flag=True,
     help="Archive compression type",
 )
 @click.option("-f", "--force", is_flag=True, default=False, help="Overwrite existing files")
diff --git a/nf_core/download.py b/nf_core/download.py
index b0210ded3f..470f6b8a9b 100644
--- a/nf_core/download.py
+++ b/nf_core/download.py
@@ -78,7 +78,7 @@ def __init__(
         pipeline=None,
         release_flag=False,
         outdir=None,
-        compress_type="tar.gz",
+        compress=False,
         force=False,
         container='none',
         singularity_cache_only=False,
@@ -89,9 +89,8 @@ def __init__(
         self.release = None
         self.outdir = outdir
         self.output_filename = None
-        self.compress_type = compress_type
-        if self.compress_type == "none":
-            self.compress_type = None
+        self.compress = compress
+        self.compress_type = None
         self.force = force
         self.singularity = container == "singularity"
         self.singularity_cache_only = singularity_cache_only
@@ -183,6 +182,15 @@ def download_workflow(self):
         self.find_container_images()
         self.get_singularity_images()

+        # If '--compress' flag was set, ask user what compression type to be used
+        if self.compress:
+            self.compress_type = questionary.select(
+                "Choose compression type:",
+                choices=["none", "tar.gz", "tar.bz2", "zip",]
+            ).ask()
+            if self.compress_type == "none":
+                self.compress_type = None
+
         # Compress into an archive
         if self.compress_type is not None:
             log.info("Compressing download..")

From 958e95bc055f779175f36139d3971dcdcc084873 Mon Sep 17 00:00:00 2001
From: Erik Danielsson
Date: Thu, 15 Apr 2021 16:03:02 +0200
Subject: [PATCH 107/210] Added checking for 'export NXF_SINGULARITY_CACHEDIR'
 in bashrc

---
 nf_core/__main__.py |  4 +++-
 nf_core/download.py | 24 ++++++++++++++++++++----
 2 files changed, 23 insertions(+), 5 deletions(-)

diff --git a/nf_core/__main__.py b/nf_core/__main__.py
index fc01d25f07..2452717e74 100755
--- a/nf_core/__main__.py
+++ b/nf_core/__main__.py
@@ -211,12 +211,14 @@ def confirm_container_download(ctx, opts, value):
         value = "none"
     return value

+
 def confirm_singularity_cache(ctx, opts, value):
     """Confirm that singularity image should be cached"""
     if not value:
         return Confirm.ask(f"Should singularity image be cached?")
     return value

+
 @nf_core_cli.command(help_priority=3)
 @click.argument("pipeline", required=False, metavar="<pipeline name>")
 @click.option("-r", "--release", is_flag=True, help="Pipeline release")
@@ -231,7 +233,7 @@ def confirm_singularity_cache(ctx, opts, value):
 @click.option(
     "-C",
     "--container",
-    type=click.Choice(['none', 'singularity']),
+    type=click.Choice(["none", "singularity"]),
     default=None,
     callback=confirm_container_download,
     help="Download images",
diff --git a/nf_core/download.py b/nf_core/download.py
index 470f6b8a9b..f9a81e90c2 100644
--- a/nf_core/download.py
+++ b/nf_core/download.py
@@ -18,6 +18,7 @@
 import tarfile
 import concurrent.futures
 from rich.progress import BarColumn, DownloadColumn, TransferSpeedColumn, Progress
+from rich.prompt import Confirm
 from zipfile import ZipFile

 import nf_core
@@ -80,7 +81,7 @@ def __init__(
         outdir=None,
         compress=False,
         force=False,
-        container='none',
+        container="none",
         singularity_cache_only=False,
         parallel_downloads=4,
     ):
@@ -139,8 +140,18 @@ def download_workflow(self):
             "Pipeline release: '{}'".format(self.release),
             "Pull singularity containers: '{}'".format("Yes" if self.singularity else "No"),
         ]
-        if self.singularity and os.environ.get("NXF_SINGULARITY_CACHEDIR"):
-            summary_log.append("Using '$NXF_SINGULARITY_CACHEDIR': {}".format(os.environ["NXF_SINGULARITY_CACHEDIR"]))
+        if self.singularity:
+            export_in_file = (
+                os.popen('grep -c "export NXF_SINGULARITY_CACHEDIR" ~/.bashrc').read().strip("\n") != "0"
+            )
+            if not export_in_file:
+                append_to_file = Confirm.ask("Add 'export NXF_SINGULARITY_CACHEDIR' to .bashrc?")
+                if append_to_file:
+                    os.system('echo "export NXF_SINGULARITY_CACHEDIR" >> ~/.bashrc')
+            if os.environ.get("NXF_SINGULARITY_CACHEDIR") is not None:
+                summary_log.append(
+                    "Using '$NXF_SINGULARITY_CACHEDIR': {}".format(os.environ["NXF_SINGULARITY_CACHEDIR"])
+                )

         # Set an output filename now that we have the outdir
         if self.compress_type is not None:
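[Review note: `grep -c` prints nothing to stdout when ~/.bashrc does not
exist, so the `!= "0"` comparison above treats a missing file as if the export
were already present, and the appended line exports the variable with no
value (PATCH 114 below adds the path). A pure-Python check along these lines
would be more robust; a sketch only, not part of the patch:

    import os

    def bashrc_has_cachedir_export():
        bashrc = os.path.expanduser("~/.bashrc")
        if not os.path.exists(bashrc):
            return False
        with open(bashrc) as fh:
            return any("export NXF_SINGULARITY_CACHEDIR" in line for line in fh)
]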
From e262e1dc59f6aab2dd86fbc052928704fc90dc6f Mon Sep 17 00:00:00 2001
From: Erik Danielsson
Date: Fri, 16 Apr 2021 08:59:16 +0200
Subject: [PATCH 108/210] Add missing space

---
 nf_core/list.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/nf_core/list.py b/nf_core/list.py
index 327ae1a879..3278d8e1ff 100644
--- a/nf_core/list.py
+++ b/nf_core/list.py
@@ -71,7 +71,7 @@ def get_local_wf(workflow, revision=None):
         log.info("Downloading workflow: {} ({})".format(workflow, revision))
         pull_cmd = f"nextflow pull {workflow}"
         if revision is not None:
-            pull_cmd += f"-r {revision}"
+            pull_cmd += f" -r {revision}"
         nf_pull_output = nf_core.utils.nextflow_cmd(pull_cmd)
         local_wf = LocalWorkflow(workflow)
         local_wf.get_local_nf_workflow_details()

From 897bae3691df8a262988b95e106681dee7874f77 Mon Sep 17 00:00:00 2001
From: Erik Danielsson
Date: Fri, 16 Apr 2021 08:56:33 +0200
Subject: [PATCH 109/210] Added version selection list for launch

---
 nf_core/download.py |  4 +---
 nf_core/launch.py   | 44 ++++++++++++++++++++++++++++++++++++++++++--
 nf_core/list.py     |  2 +-
 3 files changed, 44 insertions(+), 6 deletions(-)

diff --git a/nf_core/download.py b/nf_core/download.py
index f9a81e90c2..0395ada187 100644
--- a/nf_core/download.py
+++ b/nf_core/download.py
@@ -141,9 +141,7 @@ def download_workflow(self):
             "Pull singularity containers: '{}'".format("Yes" if self.singularity else "No"),
         ]
         if self.singularity:
-            export_in_file = (
-                os.popen('grep -c "export NXF_SINGULARITY_CACHEDIR" ~/.bashrc').read().strip("\n") != "0"
-            )
+            export_in_file = os.popen('grep -c "export NXF_SINGULARITY_CACHEDIR" ~/.bashrc').read().strip("\n") != "0"
             if not export_in_file:
                 append_to_file = Confirm.ask("Add 'export NXF_SINGULARITY_CACHEDIR' to .bashrc?")
                 if append_to_file:
diff --git a/nf_core/launch.py b/nf_core/launch.py
index ce571f373c..5e304892d3 100644
--- a/nf_core/launch.py
+++ b/nf_core/launch.py
@@ -14,8 +14,9 @@
 import re
 import subprocess
 import webbrowser
+import requests

-import nf_core.schema, nf_core.utils
+import nf_core.schema, nf_core.utils, nf_core.download

 log = logging.getLogger(__name__)

@@ -176,6 +177,18 @@ def get_pipeline_schema(self):
         # Assume nf-core if no org given
         if self.pipeline.count("/") == 0:
             self.nextflow_cmd = "nextflow run nf-core/{}".format(self.pipeline)
+
+        if not self.pipeline_revision:
+            check_for_releases = Confirm.ask("Would you like to select a specific release?")
+            if check_for_releases:
+                try:
+                    release_tags = self.try_fetch_release_tags()
+                    self.pipeline_revision = questionary.select(
+                        "Please select a release:", choices=release_tags
+                    ).ask()
+                except LookupError:
+                    pass
+
         # Add revision flag to commands if set
         if self.pipeline_revision:
             self.nextflow_cmd += " -r {}".format(self.pipeline_revision)
@@ -184,10 +197,11 @@ def get_pipeline_schema(self):
         try:
             self.schema_obj.get_schema_path(self.pipeline, revision=self.pipeline_revision)
             self.schema_obj.load_lint_schema()
-        except AssertionError:
+        except AssertionError as a:
             # No schema found
             # Check that this was actually a pipeline
             if self.schema_obj.pipeline_dir is None or not os.path.exists(self.schema_obj.pipeline_dir):
+                log.info(f"dir: {a}")
                 log.error("Could not find pipeline: {} ({})".format(self.pipeline, self.schema_obj.pipeline_dir))
                 return False
             if not os.path.exists(os.path.join(self.schema_obj.pipeline_dir, "nextflow.config")) and not os.path.exists(
@@ -208,6 +222,32 @@ def get_pipeline_schema(self):
             log.error("Could not build pipeline schema: {}".format(e))
             return False

+    def try_fetch_release_tags(self):
+        """Tries to fetch tag names of pipeline releases from github
+
+        Returns:
+            release_tags (list[str]): Returns list of release tags
+
+        Raises:
+            LookupError, if no releases were found
+        """
+        # Fetch releases from github api
+        releases_url = "https://api.github.com/repos/nf-core/{}/releases".format(self.pipeline)
+        response = requests.get(releases_url)
+        if not response.ok:
+            log.error(f"Unable to find any release tags for {self.pipeline}. Will try to continue launch.")
+            raise LookupError
+
+        # Filter out the release tags and sort them
+        release_tags = map(lambda release: release.get("tag_name", None), response.json())
+        release_tags = filter(lambda tag: tag != None, release_tags)
+        release_tags = list(release_tags)
+        if len(release_tags) == 0:
+            log.error(f"Unable to find any release tags for {self.pipeline}. Will try to continue launch.")
+            raise LookupError
+        release_tags = sorted(release_tags, key=lambda tag: tuple(tag.split(".")), reverse=True)
+        return release_tags
+
     def set_schema_inputs(self):
         """
         Take the loaded schema and set the defaults as the input parameters
diff --git a/nf_core/list.py b/nf_core/list.py
index 327ae1a879..3278d8e1ff 100644
--- a/nf_core/list.py
+++ b/nf_core/list.py
@@ -71,7 +71,7 @@ def get_local_wf(workflow, revision=None):
         log.info("Downloading workflow: {} ({})".format(workflow, revision))
         pull_cmd = f"nextflow pull {workflow}"
         if revision is not None:
-            pull_cmd += f"-r {revision}"
+            pull_cmd += f" -r {revision}"
         nf_pull_output = nf_core.utils.nextflow_cmd(pull_cmd)
         local_wf = LocalWorkflow(workflow)
         local_wf.get_local_nf_workflow_details()
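[Review note: try_fetch_release_tags() duplicates fetch_release_tags() from
download.py almost line for line. A shared module-level helper would keep the
two in sync; a sketch only, assuming both callers can import a common utils
module:

    import requests

    def fetch_nf_core_release_tags(pipeline):
        """Return release tag names for an nf-core pipeline, newest first."""
        url = "https://api.github.com/repos/nf-core/{}/releases".format(pipeline)
        response = requests.get(url, timeout=10)
        if not response.ok:
            raise LookupError("No release tags found for {}".format(pipeline))
        tags = [r["tag_name"] for r in response.json() if r.get("tag_name")]
        if not tags:
            raise LookupError("No release tags found for {}".format(pipeline))
        return sorted(tags, key=lambda tag: tuple(tag.split(".")), reverse=True)

Unauthenticated GitHub API calls are also rate-limited, so the result could be
cached or fetched with a token where one is available.]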
From 790e980f4153441147fb1e3b6ef5f3a3905dd1de Mon Sep 17 00:00:00 2001
From: kevinmenden
Date: Fri, 16 Apr 2021 10:47:59 +0200
Subject: [PATCH 110/210] fixed bug #1011

---
 CHANGELOG.md                             | 1 +
 nf_core/module-template/software/main.nf | 2 +-
 nf_core/modules/create.py                | 5 +++--
 3 files changed, 5 insertions(+), 3 deletions(-)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 01325e390e..4ca5450c58 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -17,6 +17,7 @@
 * Added a timestamp to the trace + timetime + report + dag filenames to fix overwrite issue on AWS
 * Rewrite the `params_summary_log()` function to properly ignore unset params and have nicer formatting [[#971](https://github.com/nf-core/tools/issues/971)]
 * Fix overly strict `--max_time` formatting regex in template schema [[#973](https://github.com/nf-core/tools/issues/973)]
+* Added `tool_name_underscore` to the module template to allow TOOL_SUBTOOL in `main.nf` [[#1011](https://github.com/nf-core/tools/issues/1011)]

 ## [v1.13.3 - Copper Crocodile Resurrection :crocodile:](https://github.com/nf-core/tools/releases/tag/1.13.2) - [2021-03-24]

diff --git a/nf_core/module-template/software/main.nf b/nf_core/module-template/software/main.nf
index 6d6fabc8c8..c80ea5a038 100644
--- a/nf_core/module-template/software/main.nf
+++ b/nf_core/module-template/software/main.nf
@@ -20,7 +20,7 @@ include { initOptions; saveFiles; getSoftwareName } from './functions'
 params.options = [:]
 options        = initOptions(params.options)

-process {{ tool_name|upper }} {
+process {{ tool_name_underscore|upper }} {
     tag {{ '"$meta.id"' if has_meta else "'$bam'" }}
     label '{{ process_label }}'
     publishDir "${params.outdir}",
diff --git a/nf_core/modules/create.py b/nf_core/modules/create.py
index 5c2253abf6..878528259f 100644
--- a/nf_core/modules/create.py
+++ b/nf_core/modules/create.py
@@ -33,9 +33,8 @@ def __init__(
         self.process_label = process_label
         self.has_meta = has_meta
         self.force_overwrite = force
-
-        self.tool_conda_name = conda_name
         self.subtool = None
+        self.tool_conda_name = conda_name
         self.tool_licence = None
         self.repo_type = None
         self.tool_licence = ""
@@ -116,6 +115,8 @@ def create(self):
             self.tool_name = f"{self.tool}/{self.subtool}"
             self.tool_dir = os.path.join(self.tool, self.subtool)

+        self.tool_name_underscore = self.tool_name.replace("/", "_")
+
         # Check existance of directories early for fast-fail
         self.file_paths = self.get_module_dirs()
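[Review note: `tool_name_underscore` is computed in Python before rendering,
but the same result could be produced inside the template with Jinja's
built-in filters, e.g. `{{ tool_name | replace("/", "_") | upper }}`. A quick
check of the rendered value, illustrative only:

    import jinja2

    template = jinja2.Template("process {{ tool_name_underscore | upper }} {")
    print(template.render(tool_name_underscore="samtools_sort"))
    # -> process SAMTOOLS_SORT {
]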
From 6aae0b000b73f8f6570a2ce78f9de8fe38de4c80 Mon Sep 17 00:00:00 2001
From: kevinmenden
Date: Fri, 16 Apr 2021 12:27:34 +0200
Subject: [PATCH 111/210] fix test.yml template

---
 nf_core/module-template/tests/main.nf | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/nf_core/module-template/tests/main.nf b/nf_core/module-template/tests/main.nf
index d062eba787..a1276b800e 100644
--- a/nf_core/module-template/tests/main.nf
+++ b/nf_core/module-template/tests/main.nf
@@ -2,9 +2,9 @@

 nextflow.enable.dsl = 2

-include { {{ tool_name|upper }} } from '../../../{{ "../" if subtool else "" }}software/{{ tool_dir }}/main.nf' addParams( options: [:] )
+include { {{ tool_name_underscore|upper }} } from '../../../{{ "../" if subtool else "" }}software/{{ tool_dir }}/main.nf' addParams( options: [:] )

-workflow test_{{ tool_name }} {
+workflow test_{{ tool_name_underscore }} {
     {% if has_meta %}
     input = [ [ id:'test', single_end:false ], // meta map
               file(params.test_data['sarscov2']['illumina']['test_paired_end_bam'], checkIfExists: true) ]
@@ -12,5 +12,5 @@ workflow test_{{ tool_name }} {
     input = file(params.test_data['sarscov2']['illumina']['test_single_end_bam'], checkIfExists: true)
     {%- endif %}

-    {{ tool_name|upper }} ( input )
+    {{ tool_name_underscore|upper }} ( input )
 }

From 30c18b53112fcb972c0ce808ffaf1032c98f7155 Mon Sep 17 00:00:00 2001
From: kevinmenden
Date: Fri, 16 Apr 2021 13:15:33 +0200
Subject: [PATCH 112/210] fixed test.yml template

---
 nf_core/module-template/tests/test.yml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/nf_core/module-template/tests/test.yml b/nf_core/module-template/tests/test.yml
index 8410f23c5d..ba807849cc 100644
--- a/nf_core/module-template/tests/test.yml
+++ b/nf_core/module-template/tests/test.yml
@@ -1,7 +1,7 @@
 ## TODO nf-core: Please run the following command to build this file:
 # nf-core modules create-test-yml {{ tool }}{%- if subtool %}/{{ subtool }}{%- endif %}
 - name: {{ tool }}{{ ' '+subtool if subtool else '' }}
-  command: nextflow run ./tests/software/{{ tool_dir }} -entry test_{{ tool_name }} -c tests/config/nextflow.config
+  command: nextflow run ./tests/software/{{ tool_dir }} -entry test_{{ tool_name_underscore }} -c tests/config/nextflow.config
   tags:
     - {{ tool }}
     {%- if subtool %}

From d15d4e6b1cea17318073d8b83bf9a66de7729bee Mon Sep 17 00:00:00 2001
From: kevinmenden
Date: Fri, 16 Apr 2021 13:21:06 +0200
Subject: [PATCH 113/210] fix meta.yml template

---
 nf_core/module-template/software/meta.yml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/nf_core/module-template/software/meta.yml b/nf_core/module-template/software/meta.yml
index a5116432be..be6d3e5f93 100644
--- a/nf_core/module-template/software/meta.yml
+++ b/nf_core/module-template/software/meta.yml
@@ -1,4 +1,4 @@
-name: {{ tool_name }}
+name: {{ tool_name_underscore }}
 ## TODO nf-core: Add a description of the module and list keywords
 description: write your description here
 keywords:
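[Review note: the underscore rename in PATCHES 110-113 matters because a name
like `samtools/sort` is not a legal process or workflow identifier, while
`samtools_sort` is. A small check of the naming rule, a sketch only:

    import re

    IDENTIFIER = re.compile(r"^[A-Za-z_][A-Za-z0-9_]*$")

    assert not IDENTIFIER.match("SAMTOOLS/SORT")  # '/' is not allowed
    assert IDENTIFIER.match("SAMTOOLS_SORT")      # valid process name
]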
NXF_SINGULARITY_CACHEDIR" >> ~/.bashrc') + path = Prompt.ask("Specify the path: ") + try: + with open(os.path.expanduser("~/.bashrc"), "a") as f: + f.write(f'export NXF_SINGULARITY_CACHEDIR={path}\n') + log.info("Successfully wrote to ~/.bashrc") + except FileNotFoundError: + log.error("Unable to find ~/.bashrc") + sys.exit(1) if os.environ.get("NXF_SINGULARITY_CACHEDIR") is not None: summary_log.append( "Using '$NXF_SINGULARITY_CACHEDIR': {}".format(os.environ["NXF_SINGULARITY_CACHEDIR"]) From b9336f7132d5e027046706a9c5d3e9485f767128 Mon Sep 17 00:00:00 2001 From: kevinmenden Date: Fri, 16 Apr 2021 13:58:50 +0200 Subject: [PATCH 115/210] add newline to functions.nf --- nf_core/module-template/software/functions.nf | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nf_core/module-template/software/functions.nf b/nf_core/module-template/software/functions.nf index 078fda9d21..9d0137e374 100644 --- a/nf_core/module-template/software/functions.nf +++ b/nf_core/module-template/software/functions.nf @@ -67,4 +67,4 @@ def saveFiles(Map args) { return "${getPathFromList(path_list)}/$args.filename" } } -} \ No newline at end of file +} From bf4b64a7c411e7d874d96f2e3afb392c6bcfb0c2 Mon Sep 17 00:00:00 2001 From: Erik Danielsson Date: Fri, 16 Apr 2021 14:15:38 +0200 Subject: [PATCH 116/210] Changed to only prompt when options are not specified --- nf_core/__main__.py | 26 +++------------------ nf_core/download.py | 57 +++++++++++++++++++++++++++++---------------- 2 files changed, 40 insertions(+), 43 deletions(-) diff --git a/nf_core/__main__.py b/nf_core/__main__.py index 2452717e74..b0948fea7d 100755 --- a/nf_core/__main__.py +++ b/nf_core/__main__.py @@ -201,32 +201,16 @@ def launch(pipeline, id, revision, command_only, params_in, params_out, save_all # nf-core download -def confirm_container_download(ctx, opts, value): - """Confirm choice of container""" - if value == None: - should_download = Confirm.ask(f"Should singularity image be downloaded?") - if should_download: - value = "singularity" - else: - value = "none" - return value - - -def confirm_singularity_cache(ctx, opts, value): - """Confirm that singularity image should be cached""" - if not value: - return Confirm.ask(f"Should singularity image be cached?") - return value @nf_core_cli.command(help_priority=3) @click.argument("pipeline", required=False, metavar="") -@click.option("-r", "--release", is_flag=True, help="Pipeline release") +@click.option("-r", "--release", help="Pipeline release") @click.option("-o", "--outdir", type=str, help="Output directory") @click.option( "-c", "--compress", - is_flag=True, + type=click.Choice(["tar.gz", "tar.bz2", "zip", "none"]), help="Archive compression type", ) @click.option("-f", "--force", is_flag=True, default=False, help="Overwrite existing files") @@ -234,16 +218,12 @@ def confirm_singularity_cache(ctx, opts, value): "-C", "--container", type=click.Choice(["none", "singularity"]), - default=None, - callback=confirm_container_download, help="Download images", ) @click.option( "-s", "--singularity-cache", - is_flag=True, - callback=confirm_singularity_cache, - default=False, + type=click.Choice(["yes", "no"]), help="Don't copy images to the output directory, don't set 'singularity.cacheDir' in workflow", ) @click.option("-p", "--parallel-downloads", type=int, default=4, help="Number of parallel image downloads") diff --git a/nf_core/download.py b/nf_core/download.py index 683678188d..bddf0598b1 100644 --- a/nf_core/download.py +++ b/nf_core/download.py @@ -77,24 +77,32 @@ class 
From bf4b64a7c411e7d874d96f2e3afb392c6bcfb0c2 Mon Sep 17 00:00:00 2001
From: Erik Danielsson
Date: Fri, 16 Apr 2021 14:15:38 +0200
Subject: [PATCH 116/210] Changed to only prompt when options are not
 specified

---
 nf_core/__main__.py | 26 +++------------------
 nf_core/download.py | 57 +++++++++++++++++++++++++++++----------------
 2 files changed, 40 insertions(+), 43 deletions(-)

diff --git a/nf_core/__main__.py b/nf_core/__main__.py
index 2452717e74..b0948fea7d 100755
--- a/nf_core/__main__.py
+++ b/nf_core/__main__.py
@@ -201,32 +201,16 @@ def launch(pipeline, id, revision, command_only, params_in, params_out, save_all

 # nf-core download
-def confirm_container_download(ctx, opts, value):
-    """Confirm choice of container"""
-    if value == None:
-        should_download = Confirm.ask(f"Should singularity image be downloaded?")
-        if should_download:
-            value = "singularity"
-        else:
-            value = "none"
-    return value
-
-
-def confirm_singularity_cache(ctx, opts, value):
-    """Confirm that singularity image should be cached"""
-    if not value:
-        return Confirm.ask(f"Should singularity image be cached?")
-    return value
-
-
 @nf_core_cli.command(help_priority=3)
 @click.argument("pipeline", required=False, metavar="<pipeline name>")
-@click.option("-r", "--release", is_flag=True, help="Pipeline release")
+@click.option("-r", "--release", help="Pipeline release")
 @click.option("-o", "--outdir", type=str, help="Output directory")
 @click.option(
     "-c",
     "--compress",
-    is_flag=True,
+    type=click.Choice(["tar.gz", "tar.bz2", "zip", "none"]),
     help="Archive compression type",
 )
 @click.option("-f", "--force", is_flag=True, default=False, help="Overwrite existing files")
 @click.option(
     "-C",
     "--container",
     type=click.Choice(["none", "singularity"]),
-    default=None,
-    callback=confirm_container_download,
     help="Download images",
 )
 @click.option(
     "-s",
     "--singularity-cache",
-    is_flag=True,
-    callback=confirm_singularity_cache,
-    default=False,
+    type=click.Choice(["yes", "no"]),
     help="Don't copy images to the output directory, don't set 'singularity.cacheDir' in workflow",
 )
 @click.option("-p", "--parallel-downloads", type=int, default=4, help="Number of parallel image downloads")
diff --git a/nf_core/download.py b/nf_core/download.py
index 683678188d..bddf0598b1 100644
--- a/nf_core/download.py
+++ b/nf_core/download.py
@@ -77,24 +77,32 @@ class DownloadWorkflow(object):
     def __init__(
         self,
         pipeline=None,
-        release_flag=False,
+        release=None,
         outdir=None,
-        compress=False,
+        compress_type=None,
         force=False,
         container="none",
         singularity_cache_only=False,
         parallel_downloads=4,
     ):
         self.pipeline = pipeline
-        self.release_flag = release_flag
-        self.release = None
+        self.release = release
         self.outdir = outdir
         self.output_filename = None
-        self.compress = compress
-        self.compress_type = None
+        self.compress_type = compress_type
+        if self.compress_type is None:
+            self.compress_type = self._confirm_compression()
+        if self.compress_type == "none":
+            self.compress_type = None
+
         self.force = force
+
+        if container is None:
+            container = self._confirm_container_download()
         self.singularity = container == "singularity"
         self.singularity_cache_only = singularity_cache_only
+        if self.singularity_cache_only is None and self.singularity:
+            self.singularity_cache_only = self._confirm_singularity_cache()
         self.parallel_downloads = parallel_downloads

         # Sanity checks
@@ -108,6 +116,27 @@ def __init__(
         self.nf_config = dict()
         self.containers = list()

+    def _confirm_compression(self):
+        return questionary.select(
+            "Choose compression type:",
+            choices=[
+                "none",
+                "tar.gz",
+                "tar.bz2",
+                "zip",
+            ],
+        ).ask()
+
+    def _confirm_container_download(self):
+        should_download = Confirm.ask(f"Should singularity image be downloaded?")
+        if should_download:
+            return "singularity"
+        else:
+            return "none"
+
+    def _confirm_singularity_cache(self):
+        return Confirm.ask(f"Should singularity image be cached?")
+
     def download_workflow(self):
         """Starts a nf-core workflow download."""
         # Fetch remote workflows
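[Review note: PATCH 116 switches the CLI options to a default of None so that
"flag omitted" can be told apart from an explicit choice, and only prompts in
the former case. The pattern in isolation, illustrative only:

    def resolve(value, prompt):
        # None means the option was not given; ask interactively in that case only
        return prompt() if value is None else value
]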
From e7e58d9cd7daeb6f4fcd7c27d20a1764f2e055ed Mon Sep 17 00:00:00 2001
From: Erik Danielsson
Date: Fri, 16 Apr 2021 19:54:08 +0200
Subject: [PATCH 117/210] Clean up some unnecessary changes

---
 nf_core/__main__.py | 2 +-
 nf_core/download.py | 2 --
 nf_core/launch.py   | 5 ++---
 3 files changed, 3 insertions(+), 6 deletions(-)

diff --git a/nf_core/__main__.py b/nf_core/__main__.py
index b0948fea7d..272cf97087 100755
--- a/nf_core/__main__.py
+++ b/nf_core/__main__.py
@@ -205,7 +205,7 @@ def launch(pipeline, id, revision, command_only, params_in, params_out, save_all

 @nf_core_cli.command(help_priority=3)
 @click.argument("pipeline", required=False, metavar="<pipeline name>")
-@click.option("-r", "--release", help="Pipeline release")
+@click.option("-r", "--release", type=str, help="Pipeline release")
 @click.option("-o", "--outdir", type=str, help="Output directory")
 @click.option(
     "-c",
diff --git a/nf_core/download.py b/nf_core/download.py
index bddf0598b1..6a9995e91e 100644
--- a/nf_core/download.py
+++ b/nf_core/download.py
@@ -227,8 +227,6 @@ def download_workflow(self):
         self.find_container_images()
         self.get_singularity_images()

-        # If '--compress' flag was set, ask user what compression type to be used
-
         # Compress into an archive
         if self.compress_type is not None:
             log.info("Compressing download..")
diff --git a/nf_core/launch.py b/nf_core/launch.py
index 5e304892d3..78bca4aa23 100644
--- a/nf_core/launch.py
+++ b/nf_core/launch.py
@@ -16,7 +16,7 @@
 import webbrowser
 import requests

-import nf_core.schema, nf_core.utils, nf_core.download
+import nf_core.schema, nf_core.utils

 log = logging.getLogger(__name__)

@@ -197,11 +197,10 @@ def get_pipeline_schema(self):
         try:
             self.schema_obj.get_schema_path(self.pipeline, revision=self.pipeline_revision)
             self.schema_obj.load_lint_schema()
-        except AssertionError as a:
+        except AssertionError:
             # No schema found
             # Check that this was actually a pipeline
             if self.schema_obj.pipeline_dir is None or not os.path.exists(self.schema_obj.pipeline_dir):
-                log.info(f"dir: {a}")
                 log.error("Could not find pipeline: {} ({})".format(self.pipeline, self.schema_obj.pipeline_dir))
                 return False
             if not os.path.exists(os.path.join(self.schema_obj.pipeline_dir, "nextflow.config")) and not os.path.exists(

From e7e58d9cd7daeb6f4fcd7c27d20a1764f2e055ed Mon Sep 17 00:00:00 2001
From: Pontus Freyhult
Date: Thu, 15 Apr 2021 11:20:00 +0200
Subject: [PATCH 118/210] Do not pick up locally installed python packages

---
 tools.Dockerfile | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/tools.Dockerfile b/tools.Dockerfile
index ac918c37c5..2915a876c1 100644
--- a/tools.Dockerfile
+++ b/tools.Dockerfile
@@ -2,6 +2,9 @@ FROM python:3.8.9-slim
 LABEL authors="phil.ewels@scilifelab.se,erik.danielsson@scilifelab.se" \
       description="Docker image containing requirements for the nfcore tools"

+# Do not pick up python packages from $HOME
+ENV PYTHONNOUSERSITE=1
+
 # Update pip to latest version
 RUN python -m pip install --upgrade pip

From a751a6615e01bdc8e21f3e6283287b08d0e529e2 Mon Sep 17 00:00:00 2001
From: Marc Jones
Date: Wed, 21 Apr 2021 14:12:22 +0100
Subject: [PATCH 119/210] Module lint uses repository and branch specified on
 command line

---
 nf_core/__main__.py     | 1 +
 nf_core/modules/lint.py | 5 +++--
 2 files changed, 4 insertions(+), 2 deletions(-)

diff --git a/nf_core/__main__.py b/nf_core/__main__.py
index fe4932759b..064872fe43 100755
--- a/nf_core/__main__.py
+++ b/nf_core/__main__.py
@@ -510,6 +510,7 @@ def lint(ctx, pipeline_dir, tool, all, local, passed):
     """
     try:
         module_lint = nf_core.modules.ModuleLint(dir=pipeline_dir)
+        module_lint.modules_repo = ctx.obj["modules_repo_obj"]
         module_lint.lint(module=tool, all_modules=all, print_results=True, local=local, show_passed=passed)
     except nf_core.modules.lint.ModuleLintException as e:
         log.error(e)
diff --git a/nf_core/modules/lint.py b/nf_core/modules/lint.py
index c5dc501a5b..a17d9419fc 100644
--- a/nf_core/modules/lint.py
+++ b/nf_core/modules/lint.py
@@ -25,6 +25,7 @@
 import sys

 import nf_core.utils
+from .pipeline_modules import ModulesRepo

 log = logging.getLogger(__name__)

@@ -45,7 +46,6 @@ def __init__(self, mod, lint_test, message, file_path):
         self.file_path = file_path
         self.module_name = mod.module_name

-
 class ModuleLint(object):
     """
     An object for linting modules either in a clone of the 'nf-core/modules'
@@ -58,6 +58,7 @@ def __init__(self, dir):
         self.passed = []
         self.warned = []
self.failed = [] + self.modules_repo = ModulesRepo() def lint(self, module=None, all_modules=False, print_results=True, show_passed=False, local=False): """ @@ -391,7 +392,7 @@ def check_module_changes(self, nfcore_modules): for mod in nfcore_modules: progress_bar.update(comparison_progress, advance=1, test_name=mod.module_name) module_base_url = ( - f"https://raw.githubusercontent.com/nf-core/modules/master/software/{mod.module_name}/" + f"https://raw.githubusercontent.com/{self.modules_repo.name}/{self.modules_repo.branch}/software/{mod.module_name}/" ) for f in files_to_check: From 3066f7c1bd96c7966fda170137b9bbc94e6b2084 Mon Sep 17 00:00:00 2001 From: Marc Jones Date: Wed, 21 Apr 2021 14:34:25 +0100 Subject: [PATCH 120/210] black reformatting --- nf_core/modules/lint.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/nf_core/modules/lint.py b/nf_core/modules/lint.py index a17d9419fc..f8517fdb7f 100644 --- a/nf_core/modules/lint.py +++ b/nf_core/modules/lint.py @@ -46,6 +46,7 @@ def __init__(self, mod, lint_test, message, file_path): self.file_path = file_path self.module_name = mod.module_name + class ModuleLint(object): """ An object for linting modules either in a clone of the 'nf-core/modules' @@ -391,9 +392,7 @@ def check_module_changes(self, nfcore_modules): # Loop over nf-core modules for mod in nfcore_modules: progress_bar.update(comparison_progress, advance=1, test_name=mod.module_name) - module_base_url = ( - f"https://raw.githubusercontent.com/{self.modules_repo.name}/{self.modules_repo.branch}/software/{mod.module_name}/" - ) + module_base_url = f"https://raw.githubusercontent.com/{self.modules_repo.name}/{self.modules_repo.branch}/software/{mod.module_name}/" for f in files_to_check: # open local copy, continue if file not found (a failed message has already been issued in this case) From 85698de2904f787368c9935c02dac23101e34c01 Mon Sep 17 00:00:00 2001 From: Daniel Lundin Date: Sun, 25 Apr 2021 07:51:20 +0200 Subject: [PATCH 121/210] Ignore vim tmp files --- .gitignore | 1 + 1 file changed, 1 insertion(+) diff --git a/.gitignore b/.gitignore index e981592bef..271fdb14e3 100644 --- a/.gitignore +++ b/.gitignore @@ -114,3 +114,4 @@ ENV/ .idea pip-wheel-metadata .vscode +.*.sw? 
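[Review note: with PATCH 119 above, the lint comparison URL is built from the
configured repository and branch instead of being hard-coded to nf-core/modules
master. The resulting fetch looks roughly like this; a sketch with a
hypothetical module name, not part of the patch:

    import requests

    base = "https://raw.githubusercontent.com/{}/{}/software/{}/".format(
        "nf-core/modules", "master", "fastqc"
    )
    remote_main_nf = requests.get(base + "main.nf", timeout=10).text
    # ...then compared against the local copy to flag modified module files
]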
From d9e09061c9ebd85e767cc0f8fcd0626b8b358e64 Mon Sep 17 00:00:00 2001
From: Daniel Lundin
Date: Sun, 25 Apr 2021 07:51:55 +0200
Subject: [PATCH 122/210] Corrected spelling mistake and added help

---
 nf_core/modules/create.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/nf_core/modules/create.py b/nf_core/modules/create.py
index ed7f778bfc..8fb52fe566 100644
--- a/nf_core/modules/create.py
+++ b/nf_core/modules/create.py
@@ -173,7 +173,7 @@ def create(self):
         github_username_regex = re.compile(r"^@[a-zA-Z\d](?:[a-zA-Z\d]|-(?=[a-zA-Z\d])){0,38}$")
         while self.author is None or not github_username_regex.match(self.author):
             if self.author is not None and not github_username_regex.match(self.author):
-                log.warning("Does not look like a value GitHub username!")
+                log.warning("Does not look like a valid GitHub username (must start with an '@')!")
             self.author = rich.prompt.Prompt.ask(
                 "[violet]GitHub Username:[/]{}".format(" (@author)" if author_default is None else ""),
                 default=author_default,
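[Review note: the regex in PATCH 122 encodes GitHub's username rules (1-39
characters, alphanumerics with single internal hyphens only), plus the leading
"@" required by the prompt. A quick sanity check, illustrative only:

    import re

    github_username_regex = re.compile(r"^@[a-zA-Z\d](?:[a-zA-Z\d]|-(?=[a-zA-Z\d])){0,38}$")

    assert github_username_regex.match("@ewels")
    assert not github_username_regex.match("ewels")       # missing '@'
    assert not github_username_regex.match("@bad--name")  # double hyphen
]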
From 65e5558bfda9ff60e2f2563926cf9dfacc18ead3 Mon Sep 17 00:00:00 2001
From: MaxUlysse
Date: Mon, 26 Apr 2021 12:51:35 +0200
Subject: [PATCH 123/210] feat: remove social preview image to use GitHub
 OpenGraph

---
 assets/nf-core-tools_social_preview.png | Bin 46327 -> 0 bytes
 assets/nf-core-tools_social_preview.svg | 446 ------------------------
 2 files changed, 446 deletions(-)
 delete mode 100644 assets/nf-core-tools_social_preview.png
 delete mode 100644 assets/nf-core-tools_social_preview.svg

diff --git a/assets/nf-core-tools_social_preview.png b/assets/nf-core-tools_social_preview.png
deleted file mode 100644
index c643f4d4c8da0f9f2cfb1b12ae1d10a53f032f72..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001

[46327 bytes of encoded binary patch data omitted]

diff --git a/assets/nf-core-tools_social_preview.svg b/assets/nf-core-tools_social_preview.svg
deleted file mode 100644
index afecc35166..0000000000
--- a/assets/nf-core-tools_social_preview.svg
+++ /dev/null
@@ -1,446 +0,0 @@
[446 lines of deleted SVG markup omitted; the image carried the text
"tools: Python package with helper tools for the nf-core community"]
[git binary patch data for deleted image assets omitted]

diff --git a/assets/nf-core-tools_social_preview.svg b/assets/nf-core-tools_social_preview.svg
deleted file mode 100644
index afecc35166..0000000000
--- a/assets/nf-core-tools_social_preview.svg
+++ /dev/null
@@ -1,446 +0,0 @@
[446 deleted lines of SVG markup omitted; the only text content in the file was "image/svg+xml", "Python package with helper tools for the nf-core community" and "tools"]

From a770dcfb7aa7325c9607e8ae441884887c203c90 Mon Sep 17 00:00:00 2001
From: Phil Ewels
Date: Tue, 27 Apr 2021 00:01:18 +0200
Subject: [PATCH 124/210] New lint test: params_used

Closes #1038
---
 CHANGELOG.md                  |  1 +
 nf_core/lint/__init__.py      | 24 ++++++++++++-----------
 nf_core/lint/merge_markers.py |  1 -
 nf_core/lint/params_used.py   | 36 +++++++++++++++++++++++++++++++++++
 4 files changed, 50 insertions(+), 12 deletions(-)
 create mode 100644 nf_core/lint/params_used.py

diff --git a/CHANGELOG.md b/CHANGELOG.md
index f4a657d4c3..bfdb3942c1 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -14,6 +14,7 @@
 * Fix bug in nf-core lint config skipping for the `nextflow_config` test [[#1019](https://github.com/nf-core/tools/issues/1019)]
 * New `-k`/`--key` cli option for `nf-core lint` to allow you to run only named lint tests, for faster local debugging
 * Ignore permission errors for setting up requests cache directories to allow starting with an invalid or read-only HOME directory
+* New lint test to check if params in `nextflow config` are mentioned in `main.nf` [[#1038](https://github.com/nf-core/tools/issues/1038)]
 
 ### Template
 
diff --git a/nf_core/lint/__init__.py b/nf_core/lint/__init__.py
index c43992c80d..82c4d57ddd 100644
--- a/nf_core/lint/__init__.py
+++ b/nf_core/lint/__init__.py
@@ -97,23 +97,24 @@ class PipelineLint(nf_core.utils.Pipeline):
         warned (list): A list of tuples of the form: ``(, )``
     """
 
+    from .actions_awsfulltest import actions_awsfulltest
+    from .actions_awstest import actions_awstest
+    from .actions_ci import actions_ci
+    from .actions_schema_validation import actions_schema_validation
+    from .conda_dockerfile import conda_dockerfile
+    from .conda_env_yaml import conda_env_yaml
     from .files_exist import files_exist
     from .files_unchanged import files_unchanged
+    from .merge_markers import merge_markers
     from .nextflow_config import nextflow_config
-    from .actions_ci import actions_ci
-    from .actions_awstest import actions_awstest
-    from .actions_awsfulltest import actions_awsfulltest
-    from .readme import readme
-    from .version_consistency import version_consistency
-    from .conda_env_yaml import conda_env_yaml
-    from .conda_dockerfile import conda_dockerfile
-    from .pipeline_todos import pipeline_todos
+    from .params_used import params_used
     from .pipeline_name_conventions import pipeline_name_conventions
-    from .template_strings import template_strings
+    from .pipeline_todos import pipeline_todos
+    from .readme import readme
     from .schema_lint import schema_lint
     from .schema_params import schema_params
-    from .actions_schema_validation import actions_schema_validation
-    from .merge_markers import merge_markers
+    from .template_strings import template_strings
+    from .version_consistency import version_consistency
 
     def __init__(self, wf_path, release_mode=False, fix=(), key=(), fail_ignored=False):
@@ -133,6 +134,7 @@ def __init__(self, wf_path, release_mode=False, fix=(), key=(), fail_ignored=Fal
         self.lint_tests = [
             "files_exist",
             "nextflow_config",
+            "params_used",
             "files_unchanged",
             "actions_ci",
             "actions_awstest",
diff --git a/nf_core/lint/merge_markers.py b/nf_core/lint/merge_markers.py
index 21a689a8ea..1e9bf362e0 100644
--- a/nf_core/lint/merge_markers.py
+++ b/nf_core/lint/merge_markers.py
@@ -14,7 +14,6 @@ def merge_markers(self):
 
     This test looks for remaining merge markers in the code, e.g.:
     >>>>>>> or <<<<<<<
-
     """
     passed = []
     failed = []
diff --git a/nf_core/lint/params_used.py b/nf_core/lint/params_used.py
new file mode 100644
index 0000000000..e58bf04be3
--- /dev/null
+++ b/nf_core/lint/params_used.py
@@ -0,0 +1,36 @@
+#!/usr/bin/env python
+
+import os
+
+
+def params_used(self):
+    """Check that params in ``nextflow.config`` are mentioned in ``main.nf``."""
+
+    ignore_params_template = [
+        "params.custom_config_version",
+        "params.custom_config_base",
+        "params.config_profile_name",
+        "params.show_hidden_params",
+        "params.schema_ignore_params",
+    ]
+    ignore_params = self.lint_config.get("params_used", [])
+
+    passed = []
+    warned = []
+    ignored = []
+
+    with open(os.path.join(self.wf_path, "main.nf"), "r") as fh:
+        main_nf = fh.read()
+
+    for cf in self.nf_config.keys():
+        if not cf.startswith("params.") or cf in ignore_params_template:
+            continue
+        if cf in ignore_params:
+            ignored.append("Config variable ignored: {}".format(self._wrap_quotes(cf)))
+            continue
+        if cf in main_nf:
+            passed.append("Config variable found in `main.nf`: {}".format(self._wrap_quotes(cf)))
+        else:
+            warned.append("Config variable not found in `main.nf`: {}".format(self._wrap_quotes(cf)))
+
+    return {"passed": passed, "warned": warned, "ignored": ignored}
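At its core, the new test is a plain substring check: every `params.*` key parsed from the pipeline config must appear somewhere in the raw text of `main.nf`. A minimal standalone sketch of that logic (the example values below are hypothetical, not from a real pipeline):

```python
def find_unused_params(config_params, main_nf_text, ignore=()):
    """Return config params (e.g. 'params.foo') that never appear in main.nf."""
    unused = []
    for param in config_params:
        # Only consider params.* keys, skipping any explicitly ignored ones
        if not param.startswith("params.") or param in ignore:
            continue
        # Mirrors the lint test: a simple substring check against the workflow code
        if param not in main_nf_text:
            unused.append(param)
    return unused


# Hypothetical values for illustration
config_params = ["params.input", "params.outdir", "params.never_used"]
main_nf_text = "ch = Channel.fromPath(params.input)\nresults_dir = params.outdir"
print(find_unused_params(config_params, main_nf_text))  # ['params.never_used']
```

Because it is only a substring check, a parameter referenced solely in an included sub-workflow or config file would still be flagged, which is presumably why unmatched params produce a warning rather than a hard failure.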
From 8b5928d21927654466738b769a7f429bccae1bd0 Mon Sep 17 00:00:00 2001
From: Phil Ewels
Date: Tue, 27 Apr 2021 00:05:47 +0200
Subject: [PATCH 125/210] Make new lint docs

---
 docs/api/_src/pipeline_lint_tests/params_used.rst | 4 ++++
 1 file changed, 4 insertions(+)
 create mode 100644 docs/api/_src/pipeline_lint_tests/params_used.rst

diff --git a/docs/api/_src/pipeline_lint_tests/params_used.rst b/docs/api/_src/pipeline_lint_tests/params_used.rst
new file mode 100644
index 0000000000..3c0f123117
--- /dev/null
+++ b/docs/api/_src/pipeline_lint_tests/params_used.rst
@@ -0,0 +1,4 @@
+params_used
+===========
+
+.. automethod:: nf_core.lint.PipelineLint.params_used
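With the docs entry in place, the new test can be exercised in isolation via the `-k`/`--key` option documented earlier in this changelog. A small wrapper sketch (assumes nf-core tools is installed and the command is run from the pipeline root, since `nf-core lint` lints the current directory by default):

```python
import subprocess

# Run only the new lint test; `-k`/`--key` is the cli option added in 1.14dev
# for running named lint tests on their own.
subprocess.run(["nf-core", "lint", "--key", "params_used"], check=False)
```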
From e43c87b2f2df72add9e32f2984ed52791ff406ef Mon Sep 17 00:00:00 2001
From: Phil Ewels
Date: Tue, 27 Apr 2021 11:22:38 +0200
Subject: [PATCH 126/210] Lint - merge markers - ignore binary files. Also add ability to ignore specific files in config. Moved binary detection function into utils.

---
 nf_core/create.py             | 15 +++------------
 nf_core/lint/merge_markers.py | 19 +++++++++++++++----
 nf_core/utils.py              | 17 +++++++++++++++++
 3 files changed, 35 insertions(+), 16 deletions(-)

diff --git a/nf_core/create.py b/nf_core/create.py
index 85c60b7bc4..db3a47d6e6 100644
--- a/nf_core/create.py
+++ b/nf_core/create.py
@@ -6,7 +6,6 @@
 import git
 import jinja2
 import logging
-import mimetypes
 import os
 import pathlib
 import requests
@@ -83,8 +82,6 @@ def render_template(self):
             loader=jinja2.PackageLoader("nf_core", "pipeline-template"), keep_trailing_newline=True
         )
         template_dir = os.path.join(os.path.dirname(__file__), "pipeline-template")
-        binary_ftypes = ["image", "application/java-archive", "application/x-java-archive"]
-        binary_extensions = [".jpeg", ".jpg", ".png", ".zip", ".gz", ".jar", ".tar"]
 
         object_attrs = vars(self)
         object_attrs["nf_core_version"] = nf_core.__version__
@@ -108,15 +105,9 @@ def render_template(self):
             os.makedirs(os.path.dirname(output_path), exist_ok=True)
 
             try:
-                # Just copy certain file extensions
-                filename, file_extension = os.path.splitext(template_fn_path)
-                if file_extension in binary_extensions:
-                    raise AttributeError(f"File extension: {file_extension}")
-
-                # Try to detect binary files
-                (ftype, encoding) = mimetypes.guess_type(template_fn_path, strict=False)
-                if encoding is not None or (ftype is not None and any([ftype.startswith(ft) for ft in binary_ftypes])):
-                    raise AttributeError(f"Encoding: {encoding}")
+                # Just copy binary files
+                if nf_core.utils.is_file_binary(template_fn_path):
+                    raise AttributeError(f"Binary file: {template_fn_path}")
 
                 # Got this far - render the template
                 log.debug(f"Rendering template file: '{template_fn}'")
diff --git a/nf_core/lint/merge_markers.py b/nf_core/lint/merge_markers.py
index 21a689a8ea..6f0d9e3d2e 100644
--- a/nf_core/lint/merge_markers.py
+++ b/nf_core/lint/merge_markers.py
@@ -5,6 +5,8 @@
 import io
 import fnmatch
 
+import nf_core.utils
+
 log = logging.getLogger(__name__)
 
@@ -18,6 +20,9 @@ def merge_markers(self):
     """
     passed = []
     failed = []
+    ignored = []
+
+    ignored_config = self.lint_config.get("merge_markers", [])
 
     ignore = [".git"]
     if os.path.isfile(os.path.join(self.wf_path, ".gitignore")):
@@ -31,16 +36,22 @@ def merge_markers(self):
             dirs[:] = [d for d in dirs if not fnmatch.fnmatch(os.path.join(root, d), i)]
             files[:] = [f for f in files if not fnmatch.fnmatch(os.path.join(root, f), i)]
         for fname in files:
+            # File ignored in config
+            if os.path.relpath(os.path.join(root, fname), self.wf_path) in ignored_config:
+                ignored.append(f"Ignoring file `{os.path.join(root, fname)}`")
+                continue
+            # Skip binary files
+            if nf_core.utils.is_file_binary(os.path.join(root, fname)):
+                continue
             try:
                 with io.open(os.path.join(root, fname), "rt", encoding="latin1") as fh:
                     for l in fh:
                         if ">>>>>>>" in l:
-                            failed.append(f"Merge marker '>>>>>>>' in `{os.path.join(root, fname)}`: {l}")
+                            failed.append(f"Merge marker '>>>>>>>' in `{os.path.join(root, fname)}`: {l[:30]}")
                         if "<<<<<<<" in l:
-                            failed.append(f"Merge marker '<<<<<<<' in `{os.path.join(root, fname)}`: {l}")
-                    print(root)
+                            failed.append(f"Merge marker '<<<<<<<' in `{os.path.join(root, fname)}`: {l[:30]}")
             except FileNotFoundError:
                 log.debug(f"Could not open file {os.path.join(root, fname)} in merge_markers lint test")
     if len(failed) == 0:
         passed.append("No merge markers found in pipeline files")
-    return {"passed": passed, "failed": failed}
+    return {"passed": passed, "failed": failed, "ignored": ignored}
diff --git a/nf_core/utils.py b/nf_core/utils.py
index 18f2dcb581..6c47d8c7b0 100644
--- a/nf_core/utils.py
+++ b/nf_core/utils.py
@@ -11,6 +11,7 @@
 import hashlib
 import json
 import logging
+import mimetypes
 import os
 import prompt_toolkit
 import re
@@ -550,3 +551,19 @@ def write_line_break(self, data=None):
 
     CustomDumper.add_representer(dict, CustomDumper.represent_dict_preserve_order)
     return CustomDumper
+
+
+def is_file_binary(path):
+    """Check file path to see if it is a binary file"""
+    binary_ftypes = ["image", "application/java-archive", "application/x-java-archive"]
+    binary_extensions = [".jpeg", ".jpg", ".png", ".zip", ".gz", ".jar", ".tar"]
+
+    # Check common file extensions
+    filename, file_extension = os.path.splitext(path)
+    if file_extension in binary_extensions:
+        return True
+
+    # Try to detect binary files
+    (ftype, encoding) = mimetypes.guess_type(path, strict=False)
+    if encoding is not None or (ftype is not None and any([ftype.startswith(ft) for ft in binary_ftypes])):
+        return True
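One quirk worth noting in the helper above: there is no final `return`, so text files yield `None` rather than `False`; that works because every call site only tests truthiness. A self-contained rerun of the same detection logic:

```python
import mimetypes
import os


def is_file_binary(path):
    """Same logic as the helper added to nf_core/utils.py above:
    extension list first, then mimetype-based detection."""
    binary_ftypes = ["image", "application/java-archive", "application/x-java-archive"]
    binary_extensions = [".jpeg", ".jpg", ".png", ".zip", ".gz", ".jar", ".tar"]

    _, file_extension = os.path.splitext(path)
    if file_extension in binary_extensions:
        return True

    ftype, encoding = mimetypes.guess_type(path, strict=False)
    if encoding is not None or (ftype is not None and any(ftype.startswith(ft) for ft in binary_ftypes)):
        return True
    # Falls through -> None (falsy) for text files, mirroring the patch


print(is_file_binary("docs/images/logo.png"))  # True - extension match
print(is_file_binary("main.nf"))               # None - treated as text
```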
From 25fe58722dd3985083c3b9e9ac308657c782ba60 Mon Sep 17 00:00:00 2001
From: Phil Ewels
Date: Tue, 27 Apr 2021 11:24:37 +0200
Subject: [PATCH 127/210] Changelog

---
 CHANGELOG.md | 1 +
 1 file changed, 1 insertion(+)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index f4a657d4c3..e59a4c73dc 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -14,6 +14,7 @@
 * Fix bug in nf-core lint config skipping for the `nextflow_config` test [[#1019](https://github.com/nf-core/tools/issues/1019)]
 * New `-k`/`--key` cli option for `nf-core lint` to allow you to run only named lint tests, for faster local debugging
 * Ignore permission errors for setting up requests cache directories to allow starting with an invalid or read-only HOME directory
+* Merge markers lint test - ignore binary files, allow config to ignore specific files [[#1040](https://github.com/nf-core/tools/pull/1040)]
 
 ### Template
 
From 399a14eec6cac90bb36d6f58c3dcefe88c3f7e59 Mon Sep 17 00:00:00 2001
From: Marc Jones
Date: Wed, 28 Apr 2021 15:00:29 +0100
Subject: [PATCH 128/210] Modules installed in folders based on source repo name

---
 nf_core/modules/pipeline_modules.py | 12 +++++++++---
 tests/test_modules.py               |  9 +++++++++
 2 files changed, 18 insertions(+), 3 deletions(-)

diff --git a/nf_core/modules/pipeline_modules.py b/nf_core/modules/pipeline_modules.py
index 16774e01ba..ef77bc67f8 100644
--- a/nf_core/modules/pipeline_modules.py
+++ b/nf_core/modules/pipeline_modules.py
@@ -219,8 +219,11 @@ def install(self, module=None):
             return False
         log.debug("Installing module '{}' at modules hash {}".format(module, self.modules_repo.modules_current_hash))
 
+        # Extract origin repository to use as install folder
+        install_folder = self.modules_repo.name.split('/')[0]
+
         # Check that we don't already have a folder for this module
-        module_dir = os.path.join(self.pipeline_dir, "modules", "nf-core", "software", module)
+        module_dir = os.path.join(self.pipeline_dir, "modules", install_folder, "software", module)
         if os.path.exists(module_dir):
             log.error("Module directory already exists: {}".format(module_dir))
             # TODO: uncomment next line once update is implemented
@@ -231,7 +234,7 @@ def install(self, module=None):
         files = self.modules_repo.get_module_file_urls(module)
         log.debug("Fetching module files:\n - {}".format("\n - ".join(files.keys())))
         for filename, api_url in files.items():
-            dl_filename = os.path.join(self.pipeline_dir, "modules", "nf-core", filename)
+            dl_filename = os.path.join(self.pipeline_dir, "modules", install_folder, filename)
             self.modules_repo.download_gh_file(dl_filename, api_url)
log.info("Downloaded {} files to {}".format(len(files), module_dir)) @@ -259,8 +262,11 @@ def remove(self, module): "Tool name:", choices=self.pipeline_module_names, style=nf_core.utils.nfcore_question_style ).ask() + # Extract origin repository to use as install folder + install_folder = self.modules_repo.name.split('/')[0] + # Get the module directory - module_dir = os.path.join(self.pipeline_dir, "modules", "nf-core", "software", module) + module_dir = os.path.join(self.pipeline_dir, "modules", install_folder, "software", module) # Verify that the module is actually installed if not os.path.exists(module_dir): diff --git a/tests/test_modules.py b/tests/test_modules.py index f1aa12b501..201a29f6bc 100644 --- a/tests/test_modules.py +++ b/tests/test_modules.py @@ -62,6 +62,15 @@ def test_modules_install_fastqc(self): module_path = os.path.join(self.mods.pipeline_dir, "modules", "nf-core", "software", "fastqc") assert os.path.exists(module_path) + def test_modules_install_fastqc_alternative_source(self): + """ Test installing a module from a different source repository - FastQC """ + mods = nf_core.modules.PipelineModules() + mods.modules_repo = nf_core.modules.ModulesRepo(repo='ewels/nf-core-modules', branch='master') + mods.pipeline_dir = self.pipeline_dir + assert mods.install("fastqc") is not False + module_path = os.path.join(self.mods.pipeline_dir, "modules", "ewels", "software", "fastqc") + assert os.path.exists(module_path) + def test_modules_install_fastqc_twice(self): """ Test installing a module - FastQC already there """ self.mods.install("fastqc") From 45f02b15e33640ea4be8f06c3763f171cb01838f Mon Sep 17 00:00:00 2001 From: Marc Jones Date: Wed, 28 Apr 2021 15:07:24 +0100 Subject: [PATCH 129/210] black formatting --- nf_core/__main__.py | 2 +- nf_core/create.py | 2 +- nf_core/launch.py | 18 +++---- nf_core/lint/__init__.py | 2 +- nf_core/list.py | 8 +-- nf_core/modules/lint.py | 10 ++-- nf_core/modules/pipeline_modules.py | 6 +-- nf_core/modules/test_yml_builder.py | 10 ++-- nf_core/schema.py | 16 +++--- nf_core/sync.py | 2 +- nf_core/utils.py | 4 +- tests/test_bump_version.py | 4 +- tests/test_cli.py | 6 +-- tests/test_launch.py | 62 +++++++++++----------- tests/test_licenses.py | 2 +- tests/test_list.py | 8 +-- tests/test_modules.py | 32 ++++++------ tests/test_schema.py | 80 ++++++++++++++--------------- tests/test_sync.py | 34 ++++++------ tests/test_utils.py | 2 +- 20 files changed, 155 insertions(+), 155 deletions(-) diff --git a/nf_core/__main__.py b/nf_core/__main__.py index af29172882..55498a6670 100755 --- a/nf_core/__main__.py +++ b/nf_core/__main__.py @@ -257,7 +257,7 @@ def licences(pipeline, json): # nf-core create def validate_wf_name_prompt(ctx, opts, value): - """ Force the workflow name to meet the nf-core requirements """ + """Force the workflow name to meet the nf-core requirements""" if not re.match(r"^[a-z]+$", value): click.echo("Invalid workflow name: must be lowercase without punctuation.") value = click.prompt(opts.prompt) diff --git a/nf_core/create.py b/nf_core/create.py index 85c60b7bc4..d7fa48dabe 100644 --- a/nf_core/create.py +++ b/nf_core/create.py @@ -47,7 +47,7 @@ def __init__(self, name, description, author, version="1.0dev", no_git=False, fo self.outdir = os.path.join(os.getcwd(), self.name_noslash) def init_pipeline(self): - """Creates the nf-core pipeline. 
""" + """Creates the nf-core pipeline.""" # Make the new pipeline self.render_template() diff --git a/nf_core/launch.py b/nf_core/launch.py index ce571f373c..02cbd9233b 100644 --- a/nf_core/launch.py +++ b/nf_core/launch.py @@ -21,7 +21,7 @@ class Launch(object): - """ Class to hold config option to launch a pipeline """ + """Class to hold config option to launch a pipeline""" def __init__( self, @@ -163,7 +163,7 @@ def launch_pipeline(self): self.launch_workflow() def get_pipeline_schema(self): - """ Load and validate the schema from the supplied pipeline """ + """Load and validate the schema from the supplied pipeline""" # Set up the schema self.schema_obj = nf_core.schema.PipelineSchema() @@ -224,7 +224,7 @@ def set_schema_inputs(self): self.schema_obj.validate_params() def merge_nxf_flag_schema(self): - """ Take the Nextflow flag schema and merge it with the pipeline schema """ + """Take the Nextflow flag schema and merge it with the pipeline schema""" # Add the coreNextflow subschema to the schema definitions if "definitions" not in self.schema_obj.schema: self.schema_obj.schema["definitions"] = {} @@ -236,7 +236,7 @@ def merge_nxf_flag_schema(self): self.schema_obj.schema["allOf"].insert(0, {"$ref": "#/definitions/coreNextflow"}) def prompt_web_gui(self): - """ Ask whether to use the web-based or cli wizard to collect params """ + """Ask whether to use the web-based or cli wizard to collect params""" log.info( "[magenta]Would you like to enter pipeline parameters using a web-based interface or a command-line wizard?" ) @@ -251,7 +251,7 @@ def prompt_web_gui(self): return answer["use_web_gui"] == "Web based" def launch_web_gui(self): - """ Send schema to nf-core website and launch input GUI """ + """Send schema to nf-core website and launch input GUI""" content = { "post_content": "json_schema_launcher", @@ -356,7 +356,7 @@ def sanitise_web_response(self): params[param_id] = filter_func(params[param_id]) def prompt_schema(self): - """ Go through the pipeline schema and prompt user to change defaults """ + """Go through the pipeline schema and prompt user to change defaults""" answers = {} # Start with the subschema in the definitions - use order of allOf for allOf in self.schema_obj.schema.get("allOf", []): @@ -618,7 +618,7 @@ def print_param_header(self, param_id, param_obj, is_group=False): console.print("(Use arrow keys)", style="italic", highlight=False) def strip_default_params(self): - """ Strip parameters if they have not changed from the default """ + """Strip parameters if they have not changed from the default""" # Go through each supplied parameter (force list so we can delete in the loop) for param_id in list(self.schema_obj.input_params.keys()): @@ -642,7 +642,7 @@ def strip_default_params(self): del self.nxf_flags[param_id] def build_command(self): - """ Build the nextflow run command based on what we know """ + """Build the nextflow run command based on what we know""" # Core nextflow options for flag, val in self.nxf_flags.items(): @@ -676,7 +676,7 @@ def build_command(self): self.nextflow_cmd += ' --{} "{}"'.format(param, str(val).replace('"', '\\"')) def launch_workflow(self): - """ Launch nextflow if required """ + """Launch nextflow if required""" log.info("[bold underline]Nextflow command:[/]\n[magenta]{}\n\n".format(self.nextflow_cmd)) if Confirm.ask("Do you want to run this command now? 
"): diff --git a/nf_core/lint/__init__.py b/nf_core/lint/__init__.py index 82c4d57ddd..a256fab6ba 100644 --- a/nf_core/lint/__init__.py +++ b/nf_core/lint/__init__.py @@ -117,7 +117,7 @@ class PipelineLint(nf_core.utils.Pipeline): from .version_consistency import version_consistency def __init__(self, wf_path, release_mode=False, fix=(), key=(), fail_ignored=False): - """ Initialise linting object """ + """Initialise linting object""" # Initialise the parent object super().__init__(wf_path) diff --git a/nf_core/list.py b/nf_core/list.py index 3278d8e1ff..6cd64acb48 100644 --- a/nf_core/list.py +++ b/nf_core/list.py @@ -263,7 +263,7 @@ def sort_pulled_date(wf): return table def print_json(self): - """ Dump JSON of all parsed information """ + """Dump JSON of all parsed information""" return json.dumps( {"local_workflows": self.local_workflows, "remote_workflows": self.remote_workflows}, default=lambda o: o.__dict__, @@ -308,10 +308,10 @@ def __init__(self, data): class LocalWorkflow(object): - """ Class to handle local workflows pulled by nextflow """ + """Class to handle local workflows pulled by nextflow""" def __init__(self, name): - """ Initialise the LocalWorkflow object """ + """Initialise the LocalWorkflow object""" self.full_name = name self.repository = None self.local_path = None @@ -324,7 +324,7 @@ def __init__(self, name): self.last_pull_pretty = None def get_local_nf_workflow_details(self): - """ Get full details about a local cached workflow """ + """Get full details about a local cached workflow""" if self.local_path is None: diff --git a/nf_core/modules/lint.py b/nf_core/modules/lint.py index 21b0deab6d..b5aa4cdeec 100644 --- a/nf_core/modules/lint.py +++ b/nf_core/modules/lint.py @@ -37,7 +37,7 @@ class ModuleLintException(Exception): class LintResult(object): - """ An object to hold the results of a lint test """ + """An object to hold the results of a lint test""" def __init__(self, mod, lint_test, message, file_path): self.mod = mod @@ -476,7 +476,7 @@ def __init__(self, module_dir, repo_type, base_dir, nf_core_module=True): self.module_name = module_dir.split("software" + os.sep)[1] def lint(self): - """ Perform linting on this module """ + """Perform linting on this module""" # Iterate over modules and run all checks on them # Lint the main.nf file @@ -501,7 +501,7 @@ def lint(self): return self.passed, self.warned, self.failed def lint_module_tests(self): - """ Lint module tests """ + """Lint module tests""" if os.path.exists(self.test_dir): self.passed.append(("test_dir_exists", "Test directory exists", self.test_dir)) @@ -552,7 +552,7 @@ def lint_module_tests(self): self.failed.append(("test_yml_exists", "Test `test.yml` does not exist", self.test_yml)) def lint_meta_yml(self): - """ Lint a meta yml file """ + """Lint a meta yml file""" required_keys = ["name", "input", "output"] required_keys_lists = ["intput", "output"] try: @@ -863,7 +863,7 @@ def _parse_output(self, line): return output def _is_empty(self, line): - """ Check whether a line is empty or a comment """ + """Check whether a line is empty or a comment""" empty = False if line.strip().startswith("//"): empty = True diff --git a/nf_core/modules/pipeline_modules.py b/nf_core/modules/pipeline_modules.py index ef77bc67f8..3d9825dc8e 100644 --- a/nf_core/modules/pipeline_modules.py +++ b/nf_core/modules/pipeline_modules.py @@ -220,7 +220,7 @@ def install(self, module=None): log.debug("Installing module '{}' at modules hash {}".format(module, self.modules_repo.modules_current_hash)) # Extract origin 
repository to use as install folder - install_folder = self.modules_repo.name.split('/')[0] + install_folder = self.modules_repo.name.split("/")[0] # Check that we don't already have a folder for this module module_dir = os.path.join(self.pipeline_dir, "modules", install_folder, "software", module) @@ -263,7 +263,7 @@ def remove(self, module): ).ask() # Extract origin repository to use as install folder - install_folder = self.modules_repo.name.split('/')[0] + install_folder = self.modules_repo.name.split("/")[0] # Get the module directory module_dir = os.path.join(self.pipeline_dir, "modules", install_folder, "software", module) @@ -295,7 +295,7 @@ def remove(self, module): return False def get_pipeline_modules(self): - """ Get list of modules installed in the current pipeline """ + """Get list of modules installed in the current pipeline""" self.pipeline_module_names = [] module_mains = glob.glob(f"{self.pipeline_dir}/modules/nf-core/software/**/main.nf", recursive=True) for mod in module_mains: diff --git a/nf_core/modules/test_yml_builder.py b/nf_core/modules/test_yml_builder.py index 870f63c30b..895d307a23 100644 --- a/nf_core/modules/test_yml_builder.py +++ b/nf_core/modules/test_yml_builder.py @@ -47,7 +47,7 @@ def __init__( self.tests = [] def run(self): - """ Run build steps """ + """Run build steps""" if not self.no_prompts: log.info( "[yellow]Press enter to use default values [cyan bold](shown in brackets) [yellow]or type your own responses" @@ -58,7 +58,7 @@ def run(self): self.print_test_yml() def check_inputs(self): - """ Do more complex checks about supplied flags. """ + """Do more complex checks about supplied flags.""" # Get the tool name if not specified if self.module_name is None: @@ -113,7 +113,7 @@ def check_inputs(self): ) def scrape_workflow_entry_points(self): - """ Find the test workflow entry points from main.nf """ + """Find the test workflow entry points from main.nf""" log.info(f"Looking for test workflow entry points: '{self.module_test_main}'") with open(self.module_test_main, "r") as fh: for line in fh: @@ -196,7 +196,7 @@ def _md5(self, fname): return md5sum def create_test_file_dict(self, results_dir): - """ Walk through directory and collect md5 sums """ + """Walk through directory and collect md5 sums""" test_files = [] for root, dir, file in os.walk(results_dir): for elem in file: @@ -250,7 +250,7 @@ def get_md5_sums(self, entry_point, command): return test_files def run_tests_workflow(self, command): - """ Given a test workflow and an entry point, run the test workflow """ + """Given a test workflow and an entry point, run the test workflow""" # The config expects $PROFILE and Nextflow fails if it's not set if os.environ.get("PROFILE") is None: diff --git a/nf_core/schema.py b/nf_core/schema.py index 5196bcd8fb..51eea138e1 100644 --- a/nf_core/schema.py +++ b/nf_core/schema.py @@ -28,7 +28,7 @@ class PipelineSchema(object): functions to handle pipeline JSON Schema""" def __init__(self): - """ Initialise the object """ + """Initialise the object""" self.schema = None self.pipeline_dir = None @@ -46,7 +46,7 @@ def __init__(self): self.web_schema_build_api_url = None def get_schema_path(self, path, local_only=False, revision=None): - """ Given a pipeline name, directory, or path, set self.schema_filename """ + """Given a pipeline name, directory, or path, set self.schema_filename""" # Supplied path exists - assume a local pipeline directory or schema if os.path.exists(path): @@ -75,7 +75,7 @@ def get_schema_path(self, path, local_only=False, 
revision=None): raise AssertionError(error) def load_lint_schema(self): - """ Load and lint a given schema to see if it looks valid """ + """Load and lint a given schema to see if it looks valid""" try: self.load_schema() num_params = self.validate_schema() @@ -92,7 +92,7 @@ def load_lint_schema(self): raise AssertionError(error_msg) def load_schema(self): - """ Load a pipeline schema from a file """ + """Load a pipeline schema from a file""" with open(self.schema_filename, "r") as fh: self.schema = json.load(fh) self.schema_defaults = {} @@ -153,7 +153,7 @@ def get_schema_defaults(self): self.schema_defaults[p_key] = param["default"] def save_schema(self): - """ Save a pipeline schema to a file """ + """Save a pipeline schema to a file""" # Write results to a JSON file num_params = len(self.schema.get("properties", {})) num_params += sum([len(d.get("properties", {})) for d in self.schema.get("definitions", {}).values()]) @@ -189,7 +189,7 @@ def load_input_params(self, params_path): raise AssertionError(error_msg) def validate_params(self): - """ Check given parameters against a schema and validate """ + """Check given parameters against a schema and validate""" try: assert self.schema is not None jsonschema.validate(self.input_params, self.schema) @@ -317,7 +317,7 @@ def validate_schema_title_description(self, schema=None): ) def make_skeleton_schema(self): - """ Make a new pipeline schema from the template """ + """Make a new pipeline schema from the template""" self.schema_from_scratch = True # Use Jinja to render the template schema file to a variable env = jinja2.Environment( @@ -332,7 +332,7 @@ def make_skeleton_schema(self): self.get_schema_defaults() def build_schema(self, pipeline_dir, no_prompts, web_only, url): - """ Interactively build a new pipeline schema for a pipeline """ + """Interactively build a new pipeline schema for a pipeline""" if no_prompts: self.no_prompts = True diff --git a/nf_core/sync.py b/nf_core/sync.py index f198f90ca2..6b7c8168f7 100644 --- a/nf_core/sync.py +++ b/nf_core/sync.py @@ -63,7 +63,7 @@ def __init__( gh_repo=None, gh_username=None, ): - """ Initialise syncing object """ + """Initialise syncing object""" self.pipeline_dir = os.path.abspath(pipeline_dir) self.from_branch = from_branch diff --git a/nf_core/utils.py b/nf_core/utils.py index 18f2dcb581..2670f0310d 100644 --- a/nf_core/utils.py +++ b/nf_core/utils.py @@ -108,7 +108,7 @@ class Pipeline(object): """ def __init__(self, wf_path): - """ Initialise pipeline object """ + """Initialise pipeline object""" self.conda_config = {} self.conda_package_info = {} self.nf_config = {} @@ -521,7 +521,7 @@ def get_tag_date(tag_date): def custom_yaml_dumper(): - """ Overwrite default PyYAML output to make Prettier YAML linting happy """ + """Overwrite default PyYAML output to make Prettier YAML linting happy""" class CustomDumper(yaml.Dumper): def represent_dict_preserve_order(self, data): diff --git a/tests/test_bump_version.py b/tests/test_bump_version.py index 74e9dfddf0..9ced58b8e2 100644 --- a/tests/test_bump_version.py +++ b/tests/test_bump_version.py @@ -11,7 +11,7 @@ def test_bump_pipeline_version(datafiles): - """ Test that making a release with the working example files works """ + """Test that making a release with the working example files works""" # Get a workflow and configs test_pipeline_dir = os.path.join(tempfile.mkdtemp(), "nf-core-testpipeline") create_obj = nf_core.create.PipelineCreate( @@ -49,7 +49,7 @@ def test_bump_pipeline_version(datafiles): def 
test_dev_bump_pipeline_version(datafiles): - """ Test that making a release works with a dev name and a leading v """ + """Test that making a release works with a dev name and a leading v""" # Get a workflow and configs test_pipeline_dir = os.path.join(tempfile.mkdtemp(), "nf-core-testpipeline") create_obj = nf_core.create.PipelineCreate( diff --git a/tests/test_cli.py b/tests/test_cli.py index eb1ab6f9df..474314b8eb 100644 --- a/tests/test_cli.py +++ b/tests/test_cli.py @@ -11,12 +11,12 @@ @mock.patch("nf_core.__main__.nf_core_cli") def test_header(mock_cli): - """ Just try to execute the header function """ + """Just try to execute the header function""" nf_core.__main__.run_nf_core() def test_cli_help(): - """ Test the main launch function with --help """ + """Test the main launch function with --help""" runner = CliRunner() result = runner.invoke(nf_core.__main__.nf_core_cli, ["--help"]) assert result.exit_code == 0 @@ -24,7 +24,7 @@ def test_cli_help(): def test_cli_bad_subcommand(): - """ Test the main launch function with verbose flag and an unrecognised argument """ + """Test the main launch function with verbose flag and an unrecognised argument""" runner = CliRunner() result = runner.invoke(nf_core.__main__.nf_core_cli, ["-v", "foo"]) assert result.exit_code == 2 diff --git a/tests/test_launch.py b/tests/test_launch.py index e592d56363..560619a689 100644 --- a/tests/test_launch.py +++ b/tests/test_launch.py @@ -16,7 +16,7 @@ class TestLaunch(unittest.TestCase): """Class for launch tests""" def setUp(self): - """ Create a new PipelineSchema and Launch objects """ + """Create a new PipelineSchema and Launch objects""" # Set up the schema root_repo_dir = os.path.dirname(os.path.dirname(os.path.realpath(__file__))) self.template_dir = os.path.join(root_repo_dir, "nf_core", "pipeline-template") @@ -26,12 +26,12 @@ def setUp(self): @mock.patch.object(nf_core.launch.Launch, "prompt_web_gui", side_effect=[True]) @mock.patch.object(nf_core.launch.Launch, "launch_web_gui") def test_launch_pipeline(self, mock_webbrowser, mock_lauch_web_gui): - """ Test the main launch function """ + """Test the main launch function""" self.launcher.launch_pipeline() @mock.patch.object(nf_core.launch.Confirm, "ask", side_effect=[False]) def test_launch_file_exists(self, mock_confirm): - """ Test that we detect an existing params file and return """ + """Test that we detect an existing params file and return""" # Make an empty params file to be overwritten open(self.nf_params_fn, "a").close() # Try and to launch, return with error @@ -41,19 +41,19 @@ def test_launch_file_exists(self, mock_confirm): @mock.patch.object(nf_core.launch.Launch, "launch_web_gui") @mock.patch.object(nf_core.launch.Confirm, "ask", side_effect=[False]) def test_launch_file_exists_overwrite(self, mock_webbrowser, mock_lauch_web_gui, mock_confirm): - """ Test that we detect an existing params file and we overwrite it """ + """Test that we detect an existing params file and we overwrite it""" # Make an empty params file to be overwritten open(self.nf_params_fn, "a").close() # Try and to launch, return with error self.launcher.launch_pipeline() def test_get_pipeline_schema(self): - """ Test loading the params schema from a pipeline """ + """Test loading the params schema from a pipeline""" self.launcher.get_pipeline_schema() assert len(self.launcher.schema_obj.schema["definitions"]["input_output_options"]["properties"]) > 2 def test_make_pipeline_schema(self): - """ Make a copy of the template workflow, but delete the schema file, then 
try to load it """ + """Make a copy of the template workflow, but delete the schema file, then try to load it""" test_pipeline_dir = os.path.join(tempfile.mkdtemp(), "wf") shutil.copytree(self.template_dir, test_pipeline_dir) os.remove(os.path.join(test_pipeline_dir, "nextflow_schema.json")) @@ -68,14 +68,14 @@ def test_make_pipeline_schema(self): } def test_get_pipeline_defaults(self): - """ Test fetching default inputs from the pipeline schema """ + """Test fetching default inputs from the pipeline schema""" self.launcher.get_pipeline_schema() self.launcher.set_schema_inputs() assert len(self.launcher.schema_obj.input_params) > 0 assert self.launcher.schema_obj.input_params["outdir"] == "./results" def test_get_pipeline_defaults_input_params(self): - """ Test fetching default inputs from the pipeline schema with an input params file supplied """ + """Test fetching default inputs from the pipeline schema with an input params file supplied""" tmp_filehandle, tmp_filename = tempfile.mkstemp() with os.fdopen(tmp_filehandle, "w") as fh: json.dump({"outdir": "fubar"}, fh) @@ -86,7 +86,7 @@ def test_get_pipeline_defaults_input_params(self): assert self.launcher.schema_obj.input_params["outdir"] == "fubar" def test_nf_merge_schema(self): - """ Checking merging the nextflow schema with the pipeline schema """ + """Checking merging the nextflow schema with the pipeline schema""" self.launcher.get_pipeline_schema() self.launcher.set_schema_inputs() self.launcher.merge_nxf_flag_schema() @@ -94,7 +94,7 @@ def test_nf_merge_schema(self): assert "-resume" in self.launcher.schema_obj.schema["definitions"]["coreNextflow"]["properties"] def test_ob_to_questionary_string(self): - """ Check converting a python dict to a pyenquirer format - simple strings """ + """Check converting a python dict to a pyenquirer format - simple strings""" sc_obj = { "type": "string", "default": "data/*{1,2}.fastq.gz", @@ -104,17 +104,17 @@ def test_ob_to_questionary_string(self): @mock.patch("questionary.unsafe_prompt", side_effect=[{"use_web_gui": "Web based"}]) def test_prompt_web_gui_true(self, mock_prompt): - """ Check the prompt to launch the web schema or use the cli """ + """Check the prompt to launch the web schema or use the cli""" assert self.launcher.prompt_web_gui() == True @mock.patch("questionary.unsafe_prompt", side_effect=[{"use_web_gui": "Command line"}]) def test_prompt_web_gui_false(self, mock_prompt): - """ Check the prompt to launch the web schema or use the cli """ + """Check the prompt to launch the web schema or use the cli""" assert self.launcher.prompt_web_gui() == False @mock.patch("nf_core.utils.poll_nfcore_web_api", side_effect=[{}]) def test_launch_web_gui_missing_keys(self, mock_poll_nfcore_web_api): - """ Check the code that opens the web browser """ + """Check the code that opens the web browser""" self.launcher.get_pipeline_schema() self.launcher.merge_nxf_flag_schema() try: @@ -129,14 +129,14 @@ def test_launch_web_gui_missing_keys(self, mock_poll_nfcore_web_api): @mock.patch("webbrowser.open") @mock.patch("nf_core.utils.wait_cli_function") def test_launch_web_gui(self, mock_poll_nfcore_web_api, mock_webbrowser, mock_wait_cli_function): - """ Check the code that opens the web browser """ + """Check the code that opens the web browser""" self.launcher.get_pipeline_schema() self.launcher.merge_nxf_flag_schema() assert self.launcher.launch_web_gui() == None @mock.patch("nf_core.utils.poll_nfcore_web_api", side_effect=[{"status": "error", "message": "foo"}]) def 
test_get_web_launch_response_error(self, mock_poll_nfcore_web_api): - """ Test polling the website for a launch response - status error """ + """Test polling the website for a launch response - status error""" try: self.launcher.get_web_launch_response() raise UserWarning("Should have hit an AssertionError") @@ -145,7 +145,7 @@ def test_get_web_launch_response_error(self, mock_poll_nfcore_web_api): @mock.patch("nf_core.utils.poll_nfcore_web_api", side_effect=[{"status": "foo"}]) def test_get_web_launch_response_unexpected(self, mock_poll_nfcore_web_api): - """ Test polling the website for a launch response - status error """ + """Test polling the website for a launch response - status error""" try: self.launcher.get_web_launch_response() raise UserWarning("Should have hit an AssertionError") @@ -154,12 +154,12 @@ def test_get_web_launch_response_unexpected(self, mock_poll_nfcore_web_api): @mock.patch("nf_core.utils.poll_nfcore_web_api", side_effect=[{"status": "waiting_for_user"}]) def test_get_web_launch_response_waiting(self, mock_poll_nfcore_web_api): - """ Test polling the website for a launch response - status waiting_for_user""" + """Test polling the website for a launch response - status waiting_for_user""" assert self.launcher.get_web_launch_response() == False @mock.patch("nf_core.utils.poll_nfcore_web_api", side_effect=[{"status": "launch_params_complete"}]) def test_get_web_launch_response_missing_keys(self, mock_poll_nfcore_web_api): - """ Test polling the website for a launch response - complete, but missing keys """ + """Test polling the website for a launch response - complete, but missing keys""" try: self.launcher.get_web_launch_response() raise UserWarning("Should have hit an AssertionError") @@ -183,12 +183,12 @@ def test_get_web_launch_response_missing_keys(self, mock_poll_nfcore_web_api): ) @mock.patch.object(nf_core.launch.Launch, "sanitise_web_response") def test_get_web_launch_response_valid(self, mock_poll_nfcore_web_api, mock_sanitise): - """ Test polling the website for a launch response - complete, valid response """ + """Test polling the website for a launch response - complete, valid response""" self.launcher.get_pipeline_schema() assert self.launcher.get_web_launch_response() == True def test_sanitise_web_response(self): - """ Check that we can properly sanitise results from the web """ + """Check that we can properly sanitise results from the web""" self.launcher.get_pipeline_schema() self.launcher.nxf_flags["-name"] = "" self.launcher.schema_obj.input_params["single_end"] = "true" @@ -199,7 +199,7 @@ def test_sanitise_web_response(self): assert self.launcher.schema_obj.input_params["max_cpus"] == 12 def test_ob_to_questionary_bool(self): - """ Check converting a python dict to a pyenquirer format - booleans """ + """Check converting a python dict to a pyenquirer format - booleans""" sc_obj = { "type": "boolean", "default": "True", @@ -219,7 +219,7 @@ def test_ob_to_questionary_bool(self): assert result["filter"](False) == False def test_ob_to_questionary_number(self): - """ Check converting a python dict to a pyenquirer format - with enum """ + """Check converting a python dict to a pyenquirer format - with enum""" sc_obj = {"type": "number", "default": 0.1} result = self.launcher.single_param_to_questionary("min_reps_consensus", sc_obj) assert result["type"] == "input" @@ -233,7 +233,7 @@ def test_ob_to_questionary_number(self): assert result["filter"]("") == "" def test_ob_to_questionary_integer(self): - """ Check converting a python dict to a pyenquirer 
format - with enum """ + """Check converting a python dict to a pyenquirer format - with enum""" sc_obj = {"type": "integer", "default": 1} result = self.launcher.single_param_to_questionary("broad_cutoff", sc_obj) assert result["type"] == "input" @@ -247,7 +247,7 @@ def test_ob_to_questionary_integer(self): assert result["filter"]("") == "" def test_ob_to_questionary_range(self): - """ Check converting a python dict to a pyenquirer format - with enum """ + """Check converting a python dict to a pyenquirer format - with enum""" sc_obj = {"type": "number", "minimum": "10", "maximum": "20", "default": 15} result = self.launcher.single_param_to_questionary("broad_cutoff", sc_obj) assert result["type"] == "input" @@ -261,7 +261,7 @@ def test_ob_to_questionary_range(self): assert result["filter"]("") == "" def test_ob_to_questionary_enum(self): - """ Check converting a python dict to a questionary format - with enum """ + """Check converting a python dict to a questionary format - with enum""" sc_obj = {"type": "string", "default": "copy", "enum": ["symlink", "rellink"]} result = self.launcher.single_param_to_questionary("publish_dir_mode", sc_obj) assert result["type"] == "list" @@ -269,7 +269,7 @@ def test_ob_to_questionary_enum(self): assert result["choices"] == ["symlink", "rellink"] def test_ob_to_questionary_pattern(self): - """ Check converting a python dict to a questionary format - with pattern """ + """Check converting a python dict to a questionary format - with pattern""" sc_obj = {"type": "string", "pattern": "^([a-zA-Z0-9_\\-\\.]+)@([a-zA-Z0-9_\\-\\.]+)\\.([a-zA-Z]{2,5})$"} result = self.launcher.single_param_to_questionary("email", sc_obj) assert result["type"] == "input" @@ -281,7 +281,7 @@ def test_ob_to_questionary_pattern(self): ) def test_strip_default_params(self): - """ Test stripping default parameters """ + """Test stripping default parameters""" self.launcher.get_pipeline_schema() self.launcher.set_schema_inputs() self.launcher.schema_obj.input_params.update({"input": "custom_input"}) @@ -290,14 +290,14 @@ def test_strip_default_params(self): assert self.launcher.schema_obj.input_params == {"input": "custom_input"} def test_build_command_empty(self): - """ Test the functionality to build a nextflow command - nothing customsied """ + """Test the functionality to build a nextflow command - nothing customsied""" self.launcher.get_pipeline_schema() self.launcher.merge_nxf_flag_schema() self.launcher.build_command() assert self.launcher.nextflow_cmd == "nextflow run {}".format(self.template_dir) def test_build_command_nf(self): - """ Test the functionality to build a nextflow command - core nf customised """ + """Test the functionality to build a nextflow command - core nf customised""" self.launcher.get_pipeline_schema() self.launcher.merge_nxf_flag_schema() self.launcher.nxf_flags["-name"] = "Test_Workflow" @@ -306,7 +306,7 @@ def test_build_command_nf(self): assert self.launcher.nextflow_cmd == 'nextflow run {} -name "Test_Workflow" -resume'.format(self.template_dir) def test_build_command_params(self): - """ Test the functionality to build a nextflow command - params supplied """ + """Test the functionality to build a nextflow command - params supplied""" self.launcher.get_pipeline_schema() self.launcher.schema_obj.input_params.update({"input": "custom_input"}) self.launcher.build_command() @@ -320,7 +320,7 @@ def test_build_command_params(self): assert saved_json == {"input": "custom_input"} def test_build_command_params_cl(self): - """ Test the functionality to build a 
nextflow command - params on Nextflow command line """ + """Test the functionality to build a nextflow command - params on Nextflow command line""" self.launcher.use_params_file = False self.launcher.get_pipeline_schema() self.launcher.schema_obj.input_params.update({"input": "custom_input"}) diff --git a/tests/test_licenses.py b/tests/test_licenses.py index 385237229f..7af179bdc9 100644 --- a/tests/test_licenses.py +++ b/tests/test_licenses.py @@ -17,7 +17,7 @@ class WorkflowLicensesTest(unittest.TestCase): retrieval functionality of nf-core tools.""" def setUp(self): - """ Create a new pipeline, then make a Licence object """ + """Create a new pipeline, then make a Licence object""" # Set up the schema self.pipeline_dir = os.path.join(tempfile.mkdtemp(), "test_pipeline") self.create_obj = nf_core.create.PipelineCreate("testing", "test pipeline", "tester", outdir=self.pipeline_dir) diff --git a/tests/test_list.py b/tests/test_list.py index 97be0771be..082c7ffffd 100644 --- a/tests/test_list.py +++ b/tests/test_list.py @@ -20,7 +20,7 @@ class TestLint(unittest.TestCase): @mock.patch("subprocess.check_output") def test_working_listcall(self, mock_subprocess): - """ Test that listing pipelines works """ + """Test that listing pipelines works""" wf_table = nf_core.list.list_workflows() console = Console(record=True) console.print(wf_table) @@ -30,7 +30,7 @@ def test_working_listcall(self, mock_subprocess): @mock.patch("subprocess.check_output") def test_working_listcall_archived(self, mock_subprocess): - """ Test that listing pipelines works, showing archived pipelines """ + """Test that listing pipelines works, showing archived pipelines""" wf_table = nf_core.list.list_workflows(show_archived=True) console = Console(record=True) console.print(wf_table) @@ -39,7 +39,7 @@ def test_working_listcall_archived(self, mock_subprocess): @mock.patch("subprocess.check_output") def test_working_listcall_json(self, mock_subprocess): - """ Test that listing pipelines with JSON works """ + """Test that listing pipelines with JSON works""" wf_json_str = nf_core.list.list_workflows(as_json=True) wf_json = json.loads(wf_json_str) for wf in wf_json["remote_workflows"]: @@ -49,7 +49,7 @@ def test_working_listcall_json(self, mock_subprocess): raise AssertionError("Could not find ampliseq in JSON") def test_pretty_datetime(self): - """ Test that the pretty datetime function works """ + """Test that the pretty datetime function works""" now = datetime.now() nf_core.list.pretty_date(now) now_ts = time.mktime(now.timetuple()) diff --git a/tests/test_modules.py b/tests/test_modules.py index 201a29f6bc..c6ff0a9398 100644 --- a/tests/test_modules.py +++ b/tests/test_modules.py @@ -16,7 +16,7 @@ class TestModules(unittest.TestCase): """Class for modules tests""" def setUp(self): - """ Create a new PipelineSchema and Launch objects """ + """Create a new PipelineSchema and Launch objects""" # Set up the schema root_repo_dir = os.path.dirname(os.path.dirname(os.path.realpath(__file__))) self.template_dir = os.path.join(root_repo_dir, "nf_core", "pipeline-template") @@ -26,13 +26,13 @@ def setUp(self): self.mods.pipeline_dir = self.pipeline_dir def test_modulesrepo_class(self): - """ Initialise a modules repo object """ + """Initialise a modules repo object""" modrepo = nf_core.modules.ModulesRepo() assert modrepo.name == "nf-core/modules" assert modrepo.branch == "master" def test_modules_list(self): - """ Test listing available modules """ + """Test listing available modules""" self.mods.pipeline_dir = None listed_mods = 
self.mods.list_modules() console = Console(record=True) @@ -41,54 +41,54 @@ def test_modules_list(self): assert "fastqc" in output def test_modules_install_nopipeline(self): - """ Test installing a module - no pipeline given """ + """Test installing a module - no pipeline given""" self.mods.pipeline_dir = None assert self.mods.install("foo") is False def test_modules_install_emptypipeline(self): - """ Test installing a module - empty dir given """ + """Test installing a module - empty dir given""" self.mods.pipeline_dir = tempfile.mkdtemp() with pytest.raises(UserWarning) as excinfo: self.mods.install("foo") assert "Could not find a 'main.nf' or 'nextflow.config' file" in str(excinfo.value) def test_modules_install_nomodule(self): - """ Test installing a module - unrecognised module given """ + """Test installing a module - unrecognised module given""" assert self.mods.install("foo") is False def test_modules_install_fastqc(self): - """ Test installing a module - FastQC """ + """Test installing a module - FastQC""" assert self.mods.install("fastqc") is not False module_path = os.path.join(self.mods.pipeline_dir, "modules", "nf-core", "software", "fastqc") assert os.path.exists(module_path) def test_modules_install_fastqc_alternative_source(self): - """ Test installing a module from a different source repository - FastQC """ + """Test installing a module from a different source repository - FastQC""" mods = nf_core.modules.PipelineModules() - mods.modules_repo = nf_core.modules.ModulesRepo(repo='ewels/nf-core-modules', branch='master') + mods.modules_repo = nf_core.modules.ModulesRepo(repo="ewels/nf-core-modules", branch="master") mods.pipeline_dir = self.pipeline_dir assert mods.install("fastqc") is not False module_path = os.path.join(self.mods.pipeline_dir, "modules", "ewels", "software", "fastqc") assert os.path.exists(module_path) def test_modules_install_fastqc_twice(self): - """ Test installing a module - FastQC already there """ + """Test installing a module - FastQC already there""" self.mods.install("fastqc") assert self.mods.install("fastqc") is False def test_modules_remove_fastqc(self): - """ Test removing FastQC module after installing it""" + """Test removing FastQC module after installing it""" self.mods.install("fastqc") module_path = os.path.join(self.mods.pipeline_dir, "modules", "nf-core", "software", "fastqc") assert self.mods.remove("fastqc") assert os.path.exists(module_path) is False def test_modules_remove_fastqc_uninstalled(self): - """ Test removing FastQC module without installing it """ + """Test removing FastQC module without installing it""" assert self.mods.remove("fastqc") is False def test_modules_lint_fastqc(self): - """ Test linting the fastqc module """ + """Test linting the fastqc module""" self.mods.install("fastqc") module_lint = nf_core.modules.ModuleLint(dir=self.pipeline_dir) module_lint.lint(print_results=False, all_modules=True) @@ -97,7 +97,7 @@ def test_modules_lint_fastqc(self): assert len(module_lint.failed) == 0 def test_modules_lint_empty(self): - """ Test linting a pipeline with no modules installed """ + """Test linting a pipeline with no modules installed""" module_lint = nf_core.modules.ModuleLint(dir=self.pipeline_dir) module_lint.lint(print_results=False, all_modules=True) assert len(module_lint.passed) == 0 @@ -105,13 +105,13 @@ def test_modules_lint_empty(self): assert len(module_lint.failed) == 0 def test_modules_create_succeed(self): - """ Succeed at creating the FastQC module """ + """Succeed at creating the FastQC module""" 
module_create = nf_core.modules.ModuleCreate(self.pipeline_dir, "fastqc", "@author", "process_low", True, True) module_create.create() assert os.path.exists(os.path.join(self.pipeline_dir, "modules", "local", "fastqc.nf")) def test_modules_create_fail_exists(self): - """ Fail at creating the same module twice""" + """Fail at creating the same module twice""" module_create = nf_core.modules.ModuleCreate( self.pipeline_dir, "fastqc", "@author", "process_low", False, False ) diff --git a/tests/test_schema.py b/tests/test_schema.py index 2f29a1f0bd..7d37636a0d 100644 --- a/tests/test_schema.py +++ b/tests/test_schema.py @@ -20,7 +20,7 @@ class TestSchema(unittest.TestCase): """Class for schema tests""" def setUp(self): - """ Create a new PipelineSchema object """ + """Create a new PipelineSchema object""" self.schema_obj = nf_core.schema.PipelineSchema() self.root_repo_dir = os.path.dirname(os.path.dirname(os.path.realpath(__file__))) # Copy the template to a temp directory so that we can use that for tests @@ -30,19 +30,19 @@ def setUp(self): self.template_schema = os.path.join(self.template_dir, "nextflow_schema.json") def test_load_lint_schema(self): - """ Check linting with the pipeline template directory """ + """Check linting with the pipeline template directory""" self.schema_obj.get_schema_path(self.template_dir) self.schema_obj.load_lint_schema() @pytest.mark.xfail(raises=AssertionError, strict=True) def test_load_lint_schema_nofile(self): - """ Check that linting raises properly if a non-existant file is given """ + """Check that linting raises properly if a non-existant file is given""" self.schema_obj.get_schema_path("fake_file") self.schema_obj.load_lint_schema() @pytest.mark.xfail(raises=AssertionError, strict=True) def test_load_lint_schema_notjson(self): - """ Check that linting raises properly if a non-JSON file is given """ + """Check that linting raises properly if a non-JSON file is given""" self.schema_obj.get_schema_path(os.path.join(self.template_dir, "nextflow.config")) self.schema_obj.load_lint_schema() @@ -59,20 +59,20 @@ def test_load_lint_schema_noparams(self): self.schema_obj.load_lint_schema() def test_get_schema_path_dir(self): - """ Get schema file from directory """ + """Get schema file from directory""" self.schema_obj.get_schema_path(self.template_dir) def test_get_schema_path_path(self): - """ Get schema file from a path """ + """Get schema file from a path""" self.schema_obj.get_schema_path(self.template_schema) @pytest.mark.xfail(raises=AssertionError, strict=True) def test_get_schema_path_path_notexist(self): - """ Get schema file from a path """ + """Get schema file from a path""" self.schema_obj.get_schema_path("fubar", local_only=True) def test_get_schema_path_name(self): - """ Get schema file from the name of a remote pipeline """ + """Get schema file from the name of a remote pipeline""" self.schema_obj.get_schema_path("atacseq") @pytest.mark.xfail(raises=AssertionError, strict=True) @@ -84,12 +84,12 @@ def test_get_schema_path_name_notexist(self): self.schema_obj.get_schema_path("exoseq") def test_load_schema(self): - """ Try to load a schema from a file """ + """Try to load a schema from a file""" self.schema_obj.schema_filename = self.template_schema self.schema_obj.load_schema() def test_save_schema(self): - """ Try to save a schema """ + """Try to save a schema""" # Load the template schema self.schema_obj.schema_filename = self.template_schema self.schema_obj.load_schema() @@ -100,7 +100,7 @@ def test_save_schema(self): 
self.schema_obj.save_schema() def test_load_input_params_json(self): - """ Try to load a JSON file with params for a pipeline run """ + """Try to load a JSON file with params for a pipeline run""" # Make a temporary file to write schema to tmp_file = tempfile.NamedTemporaryFile() with open(tmp_file.name, "w") as fh: @@ -108,7 +108,7 @@ def test_load_input_params_json(self): self.schema_obj.load_input_params(tmp_file.name) def test_load_input_params_yaml(self): - """ Try to load a YAML file with params for a pipeline run """ + """Try to load a YAML file with params for a pipeline run""" # Make a temporary file to write schema to tmp_file = tempfile.NamedTemporaryFile() with open(tmp_file.name, "w") as fh: @@ -117,11 +117,11 @@ def test_load_input_params_yaml(self): @pytest.mark.xfail(raises=AssertionError, strict=True) def test_load_input_params_invalid(self): - """ Check failure when a non-existent file params file is loaded """ + """Check failure when a non-existent file params file is loaded""" self.schema_obj.load_input_params("fubar") def test_validate_params_pass(self): - """ Try validating a set of parameters against a schema """ + """Try validating a set of parameters against a schema""" # Load the template schema self.schema_obj.schema_filename = self.template_schema self.schema_obj.load_schema() @@ -129,7 +129,7 @@ def test_validate_params_pass(self): assert self.schema_obj.validate_params() def test_validate_params_fail(self): - """ Check that False is returned if params don't validate against a schema """ + """Check that False is returned if params don't validate against a schema""" # Load the template schema self.schema_obj.schema_filename = self.template_schema self.schema_obj.load_schema() @@ -137,7 +137,7 @@ def test_validate_params_fail(self): assert not self.schema_obj.validate_params() def test_validate_schema_pass(self): - """ Check that the schema validation passes """ + """Check that the schema validation passes""" # Load the template schema self.schema_obj.schema_filename = self.template_schema self.schema_obj.load_schema() @@ -145,7 +145,7 @@ def test_validate_schema_pass(self): @pytest.mark.xfail(raises=AssertionError, strict=True) def test_validate_schema_fail_noparams(self): - """ Check that the schema validation fails when no params described """ + """Check that the schema validation fails when no params described""" self.schema_obj.schema = {"type": "invalidthing"} self.schema_obj.validate_schema(self.schema_obj.schema) @@ -196,7 +196,7 @@ def test_validate_schema_fail_unexpected_allof(self): assert e.args[0] == "Subschema `groupThree` found in `allOf` but not `definitions`" def test_make_skeleton_schema(self): - """ Test making a new schema skeleton """ + """Test making a new schema skeleton""" self.schema_obj.schema_filename = self.template_schema self.schema_obj.pipeline_manifest["name"] = "nf-core/test" self.schema_obj.pipeline_manifest["description"] = "Test pipeline" @@ -204,24 +204,24 @@ def test_make_skeleton_schema(self): self.schema_obj.validate_schema(self.schema_obj.schema) def test_get_wf_params(self): - """ Test getting the workflow parameters from a pipeline """ + """Test getting the workflow parameters from a pipeline""" self.schema_obj.schema_filename = self.template_schema self.schema_obj.get_wf_params() def test_prompt_remove_schema_notfound_config_returntrue(self): - """ Remove unrecognised params from the schema """ + """Remove unrecognised params from the schema""" self.schema_obj.pipeline_params = {"foo": "bar"} self.schema_obj.no_prompts 
= True assert self.schema_obj.prompt_remove_schema_notfound_config("baz") def test_prompt_remove_schema_notfound_config_returnfalse(self): - """ Do not remove unrecognised params from the schema """ + """Do not remove unrecognised params from the schema""" self.schema_obj.pipeline_params = {"foo": "bar"} self.schema_obj.no_prompts = True assert not self.schema_obj.prompt_remove_schema_notfound_config("foo") def test_remove_schema_notfound_configs(self): - """ Remove unrecognised params from the schema """ + """Remove unrecognised params from the schema""" self.schema_obj.schema = { "properties": {"foo": {"type": "string"}, "bar": {"type": "string"}}, "required": ["foo"], @@ -256,7 +256,7 @@ def test_remove_schema_notfound_configs_childschema(self): assert "foo" in params_removed def test_add_schema_found_configs(self): - """ Try adding a new parameter to the schema from the config """ + """Try adding a new parameter to the schema from the config""" self.schema_obj.pipeline_params = {"foo": "bar"} self.schema_obj.schema = {"properties": {}} self.schema_obj.no_prompts = True @@ -266,23 +266,23 @@ def test_add_schema_found_configs(self): assert "foo" in params_added def test_build_schema_param_str(self): - """ Build a new schema param from a config value (string) """ + """Build a new schema param from a config value (string)""" param = self.schema_obj.build_schema_param("foo") assert param == {"type": "string", "default": "foo"} def test_build_schema_param_bool(self): - """ Build a new schema param from a config value (bool) """ + """Build a new schema param from a config value (bool)""" param = self.schema_obj.build_schema_param("True") print(param) assert param == {"type": "boolean", "default": True} def test_build_schema_param_int(self): - """ Build a new schema param from a config value (int) """ + """Build a new schema param from a config value (int)""" param = self.schema_obj.build_schema_param("12") assert param == {"type": "integer", "default": 12} def test_build_schema_param_int(self): - """ Build a new schema param from a config value (float) """ + """Build a new schema param from a config value (float)""" param = self.schema_obj.build_schema_param("12.34") assert param == {"type": "number", "default": 12.34} @@ -309,7 +309,7 @@ def test_build_schema_from_scratch(self): @pytest.mark.xfail(raises=AssertionError, strict=True) @mock.patch("requests.post") def test_launch_web_builder_timeout(self, mock_post): - """ Mock launching the web builder, but timeout on the request """ + """Mock launching the web builder, but timeout on the request""" # Define the behaviour of the request get mock mock_post.side_effect = requests.exceptions.Timeout() self.schema_obj.launch_web_builder() @@ -317,7 +317,7 @@ def test_launch_web_builder_timeout(self, mock_post): @pytest.mark.xfail(raises=AssertionError, strict=True) @mock.patch("requests.post") def test_launch_web_builder_connection_error(self, mock_post): - """ Mock launching the web builder, but get a connection error """ + """Mock launching the web builder, but get a connection error""" # Define the behaviour of the request get mock mock_post.side_effect = requests.exceptions.ConnectionError() self.schema_obj.launch_web_builder() @@ -325,7 +325,7 @@ def test_launch_web_builder_connection_error(self, mock_post): @pytest.mark.xfail(raises=AssertionError, strict=True) @mock.patch("requests.post") def test_get_web_builder_response_timeout(self, mock_post): - """ Mock checking for a web builder response, but timeout on the request """ + """Mock 
checking for a web builder response, but timeout on the request""" # Define the behaviour of the request get mock mock_post.side_effect = requests.exceptions.Timeout() self.schema_obj.launch_web_builder() @@ -333,13 +333,13 @@ def test_get_web_builder_response_timeout(self, mock_post): @pytest.mark.xfail(raises=AssertionError, strict=True) @mock.patch("requests.post") def test_get_web_builder_response_connection_error(self, mock_post): - """ Mock checking for a web builder response, but get a connection error """ + """Mock checking for a web builder response, but get a connection error""" # Define the behaviour of the request get mock mock_post.side_effect = requests.exceptions.ConnectionError() self.schema_obj.launch_web_builder() def mocked_requests_post(**kwargs): - """ Helper function to emulate POST requests responses from the web """ + """Helper function to emulate POST requests responses from the web""" class MockResponse: def __init__(self, data, status_code): @@ -359,7 +359,7 @@ def __init__(self, data, status_code): @mock.patch("requests.post", side_effect=mocked_requests_post) def test_launch_web_builder_404(self, mock_post): - """ Mock launching the web builder """ + """Mock launching the web builder""" self.schema_obj.web_schema_build_url = "invalid_url" try: self.schema_obj.launch_web_builder() @@ -369,7 +369,7 @@ def test_launch_web_builder_404(self, mock_post): @mock.patch("requests.post", side_effect=mocked_requests_post) def test_launch_web_builder_invalid_status(self, mock_post): - """ Mock launching the web builder """ + """Mock launching the web builder""" self.schema_obj.web_schema_build_url = "valid_url_error" try: self.schema_obj.launch_web_builder() @@ -380,7 +380,7 @@ def test_launch_web_builder_invalid_status(self, mock_post): @mock.patch("requests.get") @mock.patch("webbrowser.open") def test_launch_web_builder_success(self, mock_post, mock_get, mock_webbrowser): - """ Mock launching the web builder """ + """Mock launching the web builder""" self.schema_obj.web_schema_build_url = "valid_url_success" try: self.schema_obj.launch_web_builder() @@ -390,7 +390,7 @@ def test_launch_web_builder_success(self, mock_post, mock_get, mock_webbrowser): assert e.args[0].startswith("Could not access remote API results: https://nf-co.re") def mocked_requests_get(*args, **kwargs): - """ Helper function to emulate GET requests responses from the web """ + """Helper function to emulate GET requests responses from the web""" class MockResponse: def __init__(self, data, status_code): @@ -414,7 +414,7 @@ def __init__(self, data, status_code): @mock.patch("requests.get", side_effect=mocked_requests_get) def test_get_web_builder_response_404(self, mock_post): - """ Mock launching the web builder """ + """Mock launching the web builder""" self.schema_obj.web_schema_build_api_url = "invalid_url" try: self.schema_obj.get_web_builder_response() @@ -424,7 +424,7 @@ def test_get_web_builder_response_404(self, mock_post): @mock.patch("requests.get", side_effect=mocked_requests_get) def test_get_web_builder_response_error(self, mock_post): - """ Mock launching the web builder """ + """Mock launching the web builder""" self.schema_obj.web_schema_build_api_url = "valid_url_error" try: self.schema_obj.get_web_builder_response() @@ -434,13 +434,13 @@ def test_get_web_builder_response_error(self, mock_post): @mock.patch("requests.get", side_effect=mocked_requests_get) def test_get_web_builder_response_waiting(self, mock_post): - """ Mock launching the web builder """ + """Mock launching the web 
builder""" self.schema_obj.web_schema_build_api_url = "valid_url_waiting" assert self.schema_obj.get_web_builder_response() is False @mock.patch("requests.get", side_effect=mocked_requests_get) def test_get_web_builder_response_saved(self, mock_post): - """ Mock launching the web builder """ + """Mock launching the web builder""" self.schema_obj.web_schema_build_api_url = "valid_url_saved" try: self.schema_obj.get_web_builder_response() diff --git a/tests/test_sync.py b/tests/test_sync.py index 3900a95d5d..ce7d07dc7f 100644 --- a/tests/test_sync.py +++ b/tests/test_sync.py @@ -20,13 +20,13 @@ def setUp(self): self.make_new_pipeline() def make_new_pipeline(self): - """ Create a new pipeline to test """ + """Create a new pipeline to test""" self.pipeline_dir = os.path.join(tempfile.mkdtemp(), "test_pipeline") self.create_obj = nf_core.create.PipelineCreate("testing", "test pipeline", "tester", outdir=self.pipeline_dir) self.create_obj.init_pipeline() def test_inspect_sync_dir_notgit(self): - """ Try syncing an empty directory """ + """Try syncing an empty directory""" psync = nf_core.sync.PipelineSync(tempfile.mkdtemp()) try: psync.inspect_sync_dir() @@ -35,7 +35,7 @@ def test_inspect_sync_dir_notgit(self): assert "does not appear to be a git repository" in e.args[0] def test_inspect_sync_dir_dirty(self): - """ Try syncing a pipeline with uncommitted changes """ + """Try syncing a pipeline with uncommitted changes""" # Add an empty file, uncommitted test_fn = os.path.join(self.pipeline_dir, "uncommitted") open(test_fn, "a").close() @@ -52,7 +52,7 @@ def test_inspect_sync_dir_dirty(self): raise e def test_get_wf_config_no_branch(self): - """ Try getting a workflow config when the branch doesn't exist """ + """Try getting a workflow config when the branch doesn't exist""" # Try to sync, check we halt with the right error psync = nf_core.sync.PipelineSync(self.pipeline_dir, from_branch="foo") try: @@ -63,7 +63,7 @@ def test_get_wf_config_no_branch(self): assert e.args[0] == "Branch `foo` not found!" def test_get_wf_config_missing_required_config(self): - """ Try getting a workflow config, then make it miss a required config option """ + """Try getting a workflow config, then make it miss a required config option""" # Try to sync, check we halt with the right error psync = nf_core.sync.PipelineSync(self.pipeline_dir) psync.required_config_vars = ["fakethisdoesnotexist"] @@ -78,14 +78,14 @@ def test_get_wf_config_missing_required_config(self): assert e.args[0] == "Workflow config variable `fakethisdoesnotexist` not found!" 
def test_checkout_template_branch(self): - """ Try checking out the TEMPLATE branch of the pipeline """ + """Try checking out the TEMPLATE branch of the pipeline""" psync = nf_core.sync.PipelineSync(self.pipeline_dir) psync.inspect_sync_dir() psync.get_wf_config() psync.checkout_template_branch() def test_delete_template_branch_files(self): - """ Confirm that we can delete all files in the TEMPLATE branch """ + """Confirm that we can delete all files in the TEMPLATE branch""" psync = nf_core.sync.PipelineSync(self.pipeline_dir) psync.inspect_sync_dir() psync.get_wf_config() @@ -94,7 +94,7 @@ def test_delete_template_branch_files(self): assert os.listdir(self.pipeline_dir) == [".git"] def test_create_template_pipeline(self): - """ Confirm that we can delete all files in the TEMPLATE branch """ + """Confirm that we can delete all files in the TEMPLATE branch""" # First, delete all the files psync = nf_core.sync.PipelineSync(self.pipeline_dir) psync.inspect_sync_dir() @@ -108,7 +108,7 @@ def test_create_template_pipeline(self): assert "nextflow.config" in os.listdir(self.pipeline_dir) def test_commit_template_changes_nochanges(self): - """ Try to commit the TEMPLATE branch, but no changes were made """ + """Try to commit the TEMPLATE branch, but no changes were made""" # Check out the TEMPLATE branch but skip making the new template etc. psync = nf_core.sync.PipelineSync(self.pipeline_dir) psync.inspect_sync_dir() @@ -118,7 +118,7 @@ def test_commit_template_changes_nochanges(self): assert psync.commit_template_changes() is False def test_commit_template_changes_changes(self): - """ Try to commit the TEMPLATE branch, but no changes were made """ + """Try to commit the TEMPLATE branch, but no changes were made""" # Check out the TEMPLATE branch but skip making the new template etc. psync = nf_core.sync.PipelineSync(self.pipeline_dir) psync.inspect_sync_dir() @@ -135,11 +135,11 @@ def test_commit_template_changes_changes(self): assert psync.repo.is_dirty(untracked_files=True) is False def raise_git_exception(self): - """ Raise an exception from GitPython""" + """Raise an exception from GitPython""" raise git.exc.GitCommandError("Test") def test_push_template_branch_error(self): - """ Try pushing the changes, but without a remote (should fail) """ + """Try pushing the changes, but without a remote (should fail)""" # Check out the TEMPLATE branch but skip making the new template etc. 
psync = nf_core.sync.PipelineSync(self.pipeline_dir) psync.inspect_sync_dir() @@ -157,7 +157,7 @@ def test_push_template_branch_error(self): assert e.args[0].startswith("Could not push TEMPLATE branch") def mocked_requests_get(**kwargs): - """ Helper function to emulate POST requests responses from the web """ + """Helper function to emulate POST requests responses from the web""" class MockResponse: def __init__(self, data, status_code): @@ -172,7 +172,7 @@ def __init__(self, data, status_code): return MockResponse({"get_url": kwargs["url"]}, 404) def mocked_requests_patch(**kwargs): - """ Helper function to emulate POST requests responses from the web """ + """Helper function to emulate POST requests responses from the web""" class MockResponse: def __init__(self, data, status_code): @@ -186,7 +186,7 @@ def __init__(self, data, status_code): return MockResponse({"patch_url": kwargs["url"]}, 404) def mocked_requests_post(**kwargs): - """ Helper function to emulate POST requests responses from the web """ + """Helper function to emulate POST requests responses from the web""" class MockResponse: def __init__(self, data, status_code): @@ -203,7 +203,7 @@ def __init__(self, data, status_code): @mock.patch("requests.get", side_effect=mocked_requests_get) @mock.patch("requests.post", side_effect=mocked_requests_post) def test_make_pull_request_success(self, mock_get, mock_post): - """ Try making a PR - successful response """ + """Try making a PR - successful response""" psync = nf_core.sync.PipelineSync(self.pipeline_dir) psync.gh_username = "no_existing_pr" psync.gh_repo = "no_existing_pr/response" @@ -214,7 +214,7 @@ def test_make_pull_request_success(self, mock_get, mock_post): @mock.patch("requests.get", side_effect=mocked_requests_get) @mock.patch("requests.post", side_effect=mocked_requests_post) def test_make_pull_request_bad_response(self, mock_get, mock_post): - """ Try making a PR and getting a 404 error """ + """Try making a PR and getting a 404 error""" psync = nf_core.sync.PipelineSync(self.pipeline_dir) psync.gh_username = "bad_url" psync.gh_repo = "bad_url/response" diff --git a/tests/test_utils.py b/tests/test_utils.py index 542a28ee28..c6947861c7 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -129,6 +129,6 @@ def test_pip_package_connection_error(self, mock_get): nf_core.utils.pip_package("multiqc=1.10") def test_pip_erroneous_package(self): - """ Tests the PyPi API package information query """ + """Tests the PyPi API package information query""" with pytest.raises(ValueError): nf_core.utils.pip_package("not_a_package=1.0") From ec7d48652a714f4b97388fe4f64aa0c47e943f62 Mon Sep 17 00:00:00 2001 From: Phil Ewels Date: Wed, 28 Apr 2021 16:17:39 +0200 Subject: [PATCH 130/210] Fix syntax error --- nf_core/__main__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nf_core/__main__.py b/nf_core/__main__.py index 31c36a7bf0..615e013c88 100755 --- a/nf_core/__main__.py +++ b/nf_core/__main__.py @@ -205,7 +205,7 @@ def launch(pipeline, id, revision, command_only, params_in, params_out, save_all @nf_core_cli.command(help_priority=3) @click.argument("pipeline", required=False, metavar="") -@click.option("-r", "--release", type=str help="Pipeline release") +@click.option("-r", "--release", type=str, help="Pipeline release") @click.option("-o", "--outdir", type=str, help="Output directory") @click.option( "-c", From 7332d41636bda4b7aaf16a4faada336efb1a5fc8 Mon Sep 17 00:00:00 2001 From: Marc Jones Date: Wed, 28 Apr 2021 15:23:15 +0100 Subject: [PATCH 
131/210] Added removal test for alternative source repo name --- tests/test_modules.py | 15 +++++++++++---- 1 file changed, 11 insertions(+), 4 deletions(-) diff --git a/tests/test_modules.py b/tests/test_modules.py index c6ff0a9398..c1538a017d 100644 --- a/tests/test_modules.py +++ b/tests/test_modules.py @@ -24,6 +24,9 @@ def setUp(self): shutil.copytree(self.template_dir, self.pipeline_dir) self.mods = nf_core.modules.PipelineModules() self.mods.pipeline_dir = self.pipeline_dir + self.mods_alt = nf_core.modules.PipelineModules() + self.mods_alt.pipeline_dir = self.pipeline_dir + self.mods_alt.modules_repo = nf_core.modules.ModulesRepo(repo="ewels/nf-core-modules", branch="master") def test_modulesrepo_class(self): """Initialise a modules repo object""" @@ -64,10 +67,7 @@ def test_modules_install_fastqc(self): def test_modules_install_fastqc_alternative_source(self): """Test installing a module from a different source repository - FastQC""" - mods = nf_core.modules.PipelineModules() - mods.modules_repo = nf_core.modules.ModulesRepo(repo="ewels/nf-core-modules", branch="master") - mods.pipeline_dir = self.pipeline_dir - assert mods.install("fastqc") is not False + assert self.mods_alt.install("fastqc") is not False module_path = os.path.join(self.mods.pipeline_dir, "modules", "ewels", "software", "fastqc") assert os.path.exists(module_path) @@ -83,6 +83,13 @@ def test_modules_remove_fastqc(self): assert self.mods.remove("fastqc") assert os.path.exists(module_path) is False + def test_modules_remove_fastqc_alternative_source(self): + """Test removing FastQC module after installing it from an alternative source""" + self.mods_alt.install("fastqc") + module_path = os.path.join(self.mods.pipeline_dir, "modules", "ewels", "software", "fastqc") + assert self.mods_alt.remove("fastqc") + assert os.path.exists(module_path) is False + def test_modules_remove_fastqc_uninstalled(self): """Test removing FastQC module without installing it""" assert self.mods.remove("fastqc") is False From d06e3cc464a8e6a0ebe577a0ba0f1588384092b4 Mon Sep 17 00:00:00 2001 From: Marc Jones Date: Wed, 28 Apr 2021 15:27:33 +0100 Subject: [PATCH 132/210] Updated CHANGELOG --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index bfdb3942c1..742848188d 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -15,6 +15,7 @@ * New `-k`/`--key` cli option for `nf-core lint` to allow you to run only named lint tests, for faster local debugging * Ignore permission errors for setting up requests cache directories to allow starting with an invalid or read-only HOME directory * New lint test to check if params in `nextflow config` are mentioned in `main.nf` [[#1038](https://github.com/nf-core/tools/issues/1038)] +* Modules installed from alternative sources are put in folders based on the name of the source repository ### Template From 115c11269712aca3d2b021ac59d4eb3a094a52d3 Mon Sep 17 00:00:00 2001 From: Phil Ewels Date: Wed, 28 Apr 2021 16:54:48 +0200 Subject: [PATCH 133/210] Testing and refactoring --- nf_core/download.py | 163 ++++++++++++++++++++++++-------------------- 1 file changed, 89 insertions(+), 74 deletions(-) diff --git a/nf_core/download.py b/nf_core/download.py index 6a9995e91e..5f3291982c 100644 --- a/nf_core/download.py +++ b/nf_core/download.py @@ -81,7 +81,7 @@ def __init__( outdir=None, compress_type=None, force=False, - container="none", + container=None, singularity_cache_only=False, parallel_downloads=4, ): @@ -90,19 +90,10 @@ def __init__( self.outdir = outdir 
self.output_filename = None self.compress_type = compress_type - if self.compress_type is None: - self.compress_type = self._confirm_compression() - if self.compress_type == "none": - self.compress_type = None - self.force = force - - if container is None: - container = self._confirm_container_download() + self.container = container self.singularity = container == "singularity" self.singularity_cache_only = singularity_cache_only - if self.singularity_cache_only is None and self.singularity: - self.singularity_cache_only = self._confirm_singularity_cache() self.parallel_downloads = parallel_downloads # Sanity checks @@ -116,6 +107,11 @@ def __init__( self.nf_config = dict() self.containers = list() + # Fetch remote workflows + self.wfs = nf_core.list.Workflows() + self.wfs.get_remote_workflows() + self.wf_branches = {} + def _confirm_compression(self): return questionary.select( "Choose compression type:", @@ -139,29 +135,12 @@ def _confirm_singularity_cache(self): def download_workflow(self): """Starts a nf-core workflow download.""" - # Fetch remote workflows - wfs = nf_core.list.Workflows() - wfs.get_remote_workflows() - - # Prompt user if pipeline name was not specified - if self.pipeline is None: - self.pipeline = questionary.autocomplete( - "Pipeline name:", - choices=[wf.name for wf in wfs.remote_workflows], - style=nf_core.utils.nfcore_question_style, - ).ask() - # Prompt user for release tag if '--release' was set - if self.release is None: - try: - release_tags = self.fetch_release_tags() - except LookupError: - sys.exit(1) - self.release = questionary.select("Select release:", choices=release_tags).ask() + self.prompt_inputs() # Get workflow details try: - self.fetch_workflow_details(wfs) + self.fetch_workflow_details() except LookupError: sys.exit(1) @@ -232,6 +211,41 @@ def download_workflow(self): log.info("Compressing download..") self.compress_download() + def prompt_inputs(self): + """Interactively prompt the user for any missing flags""" + + # Prompt user if pipeline name was not specified + if self.pipeline is None: + self.pipeline = questionary.autocomplete( + "Pipeline name:", + choices=[wf.name for wf in self.wfs.remote_workflows], + style=nf_core.utils.nfcore_question_style, + ).ask() + + # Prompt user for release tag if '--release' was not set + if self.release is None: + try: + release_tags = self.fetch_release_tags() + except LookupError: + sys.exit(1) + self.release = questionary.select("Select release / branch:", choices=release_tags).ask() + + # Download singularity container? + if self.container is None: + self.container = self._confirm_container_download() + + # Use $NXF_SINGULARITY_CACHEDIR ? + if self.singularity_cache_only is None and self.singularity: + self.singularity_cache_only = self._confirm_singularity_cache() + + # Compress the downloaded files? 
+ if self.compress_type is None: + self.compress_type = self._confirm_compression() + + # Correct type for no-compression + if self.compress_type == "none": + self.compress_type = None + def fetch_release_tags(self): """Fetches tag names of pipeline releases from github @@ -241,67 +255,68 @@ def fetch_release_tags(self): Raises: LookupError, if no releases were found """ - # Fetch releases from github api - releases_url = "https://api.github.com/repos/nf-core/{}/releases".format(self.pipeline) - response = requests.get(releases_url) + + release_tags = [] + + # We get releases from https://nf-co.re/pipelines.json + for wf in self.wfs.remote_workflows: + if wf.full_name == self.pipeline or wf.name == self.pipeline: + if len(wf.releases) > 0: + releases = sorted(wf.releases, key=lambda k: k.get("published_at_timestamp", 0), reverse=True) + release_tags = list(map(lambda release: release.get("tag_name", None), releases)) + + # Fetch branches from github api + branches_url = "https://api.github.com/repos/nf-core/{}/branches".format(self.pipeline) + branch_response = requests.get(branches_url) # Filter out the release tags and sort them - release_tags = map(lambda release: release.get("tag_name", None), response.json()) - release_tags = filter(lambda tag: tag != None, release_tags) - release_tags = list(release_tags) - if len(release_tags) == 0: - log.error("Unable to find any releases!") - raise LookupError - release_tags = sorted(release_tags, key=lambda tag: tuple(tag.split(".")), reverse=True) + for branch in branch_response.json(): + self.wf_branches[branch["name"]] = branch["commit"]["sha"] + release_tags.extend( + [ + b + for b in self.wf_branches.keys() + if b != "TEMPLATE" and b != "initial_commit" and not b.startswith("nf-core-template-merge") + ] + ) + return release_tags - def fetch_workflow_details(self, wfs): + def fetch_workflow_details(self): """Fetches details of a nf-core workflow to download. - Args: - wfs (nf_core.list.Workflows): A nf_core.list.Workflows object - Raises: LockupError, if the pipeline can not be found. """ # Get workflow download details - for wf in wfs.remote_workflows: + for wf in self.wfs.remote_workflows: if wf.full_name == self.pipeline or wf.name == self.pipeline: # Set pipeline name self.wf_name = wf.name - # Find latest release hash - if self.release is None and len(wf.releases) > 0: - # Sort list of releases so that most recent is first - wf.releases = sorted(wf.releases, key=lambda k: k.get("published_at_timestamp", 0), reverse=True) - self.release = wf.releases[0]["tag_name"] - self.wf_sha = wf.releases[0]["tag_sha"] - log.debug("No release specified. 
Using latest release: {}".format(self.release)) - # Find specified release hash - elif self.release is not None: - for r in wf.releases: - if r["tag_name"] == self.release.lstrip("v"): - self.wf_sha = r["tag_sha"] - break + # Find specified release / branch hash + if self.release is not None: + + # Branch + if self.release in self.wf_branches.keys(): + self.wf_sha = self.wf_branches[self.release] + + # Release else: - log.error("Not able to find release '{}' for {}".format(self.release, wf.full_name)) - log.info( - "Available {} releases: {}".format( - wf.full_name, ", ".join([r["tag_name"] for r in wf.releases]) + for r in wf.releases: + if r["tag_name"] == self.release.lstrip("v"): + self.wf_sha = r["tag_sha"] + break + else: + log.error("Not able to find release '{}' for {}".format(self.release, wf.full_name)) + log.info( + "Available {} releases: {}".format( + wf.full_name, ", ".join([r["tag_name"] for r in wf.releases]) + ) ) - ) - raise LookupError("Not able to find release '{}' for {}".format(self.release, wf.full_name)) - - # Must be a dev-only pipeline - elif not self.release: - self.release = "dev" - self.wf_sha = "master" # Cheating a little, but GitHub download link works - log.warning( - "Pipeline is in development - downloading current code on master branch.\n" - + "This is likely to change soon should not be considered fully reproducible." - ) + raise LookupError("Not able to find release '{}' for {}".format(self.release, wf.full_name)) # Set outdir name if not defined if not self.outdir: @@ -330,7 +345,7 @@ def fetch_workflow_details(self, wfs): self.wf_download_url = "https://github.com/{}/archive/{}.zip".format(self.pipeline, self.release) else: log.error("Not able to find pipeline '{}'".format(self.pipeline)) - log.info("Available pipelines: {}".format(", ".join([w.name for w in wfs.remote_workflows]))) + log.info("Available pipelines: {}".format(", ".join([w.name for w in self.wfs.remote_workflows]))) raise LookupError("Not able to find pipeline '{}'".format(self.pipeline)) def download_wf_files(self): From 935b64e4cb8c0a4d4c477eafdb4d8a4d45629e74 Mon Sep 17 00:00:00 2001 From: Phil Ewels Date: Wed, 28 Apr 2021 17:01:11 +0200 Subject: [PATCH 134/210] Fix behaviour when pipeline name doesn't exist --- nf_core/download.py | 46 +++++++++++++++++++++++---------------------- 1 file changed, 24 insertions(+), 22 deletions(-) diff --git a/nf_core/download.py b/nf_core/download.py index 5f3291982c..32e5d26c89 100644 --- a/nf_core/download.py +++ b/nf_core/download.py @@ -224,11 +224,9 @@ def prompt_inputs(self): # Prompt user for release tag if '--release' was not set if self.release is None: - try: - release_tags = self.fetch_release_tags() - except LookupError: - sys.exit(1) - self.release = questionary.select("Select release / branch:", choices=release_tags).ask() + release_tags = self.fetch_release_tags() + if len(release_tags) > 0: + self.release = questionary.select("Select release / branch:", choices=release_tags).ask() # Download singularity container? 
if self.container is None: @@ -263,22 +261,26 @@ def fetch_release_tags(self): if wf.full_name == self.pipeline or wf.name == self.pipeline: if len(wf.releases) > 0: releases = sorted(wf.releases, key=lambda k: k.get("published_at_timestamp", 0), reverse=True) - release_tags = list(map(lambda release: release.get("tag_name", None), releases)) - - # Fetch branches from github api - branches_url = "https://api.github.com/repos/nf-core/{}/branches".format(self.pipeline) - branch_response = requests.get(branches_url) - - # Filter out the release tags and sort them - for branch in branch_response.json(): - self.wf_branches[branch["name"]] = branch["commit"]["sha"] - release_tags.extend( - [ - b - for b in self.wf_branches.keys() - if b != "TEMPLATE" and b != "initial_commit" and not b.startswith("nf-core-template-merge") - ] - ) + release_tags = list(map(lambda release: release.get("tag_name"), releases)) + + try: + # Fetch branches from github api + branches_url = f"https://api.github.com/repos/nf-core/{self.pipeline}/branches" + branch_response = requests.get(branches_url) + + # Filter out the release tags and sort them + for branch in branch_response.json(): + self.wf_branches[branch["name"]] = branch["commit"]["sha"] + release_tags.extend( + [ + b + for b in self.wf_branches.keys() + if b != "TEMPLATE" and b != "initial_commit" and not b.startswith("nf-core-template-merge") + ] + ) + except TypeError: + # This will be picked up later if not a repo, just log for now + log.debug("Couldn't fetch branches - invalid repo?") return release_tags @@ -345,7 +347,7 @@ def fetch_workflow_details(self): self.wf_download_url = "https://github.com/{}/archive/{}.zip".format(self.pipeline, self.release) else: log.error("Not able to find pipeline '{}'".format(self.pipeline)) - log.info("Available pipelines: {}".format(", ".join([w.name for w in self.wfs.remote_workflows]))) + log.info("Available nf-core pipelines: '{}'".format("', '".join([w.name for w in self.wfs.remote_workflows]))) raise LookupError("Not able to find pipeline '{}'".format(self.pipeline)) def download_wf_files(self): From b41337d5fc1d9829e5a66964db1f49e24da51832 Mon Sep 17 00:00:00 2001 From: Phil Ewels Date: Wed, 28 Apr 2021 17:07:15 +0200 Subject: [PATCH 135/210] Better cli styling --- nf_core/download.py | 17 +++++++++++++---- nf_core/launch.py | 4 +++- 2 files changed, 16 insertions(+), 5 deletions(-) diff --git a/nf_core/download.py b/nf_core/download.py index 32e5d26c89..12f910636e 100644 --- a/nf_core/download.py +++ b/nf_core/download.py @@ -121,6 +121,7 @@ def _confirm_compression(self): "tar.bz2", "zip", ], + style=nf_core.utils.nfcore_question_style, ).ask() def _confirm_container_download(self): @@ -226,7 +227,9 @@ def prompt_inputs(self): if self.release is None: release_tags = self.fetch_release_tags() if len(release_tags) > 0: - self.release = questionary.select("Select release / branch:", choices=release_tags).ask() + self.release = questionary.select( + "Select release / branch:", choices=release_tags, style=nf_core.utils.nfcore_question_style + ).ask() # Download singularity container? 
if self.container is None: @@ -347,7 +350,9 @@ def fetch_workflow_details(self): self.wf_download_url = "https://github.com/{}/archive/{}.zip".format(self.pipeline, self.release) else: log.error("Not able to find pipeline '{}'".format(self.pipeline)) - log.info("Available nf-core pipelines: '{}'".format("', '".join([w.name for w in self.wfs.remote_workflows]))) + log.info( + "Available nf-core pipelines: '{}'".format("', '".join([w.name for w in self.wfs.remote_workflows])) + ) raise LookupError("Not able to find pipeline '{}'".format(self.pipeline)) def download_wf_files(self): @@ -755,7 +760,11 @@ def compress_download(self): with tarfile.open(self.output_filename, "w:{}".format(ctype)) as tar: tar.add(self.outdir, arcname=os.path.basename(self.outdir)) tar_flags = "xzf" if ctype == "gz" else "xjf" - log.info("Command to extract files: tar -{} {}".format(tar_flags, self.output_filename)) + log.info( + "Command to extract files: [bright_magenta on grey0] tar -{} {} [/]".format( + tar_flags, self.output_filename + ) + ) # .zip files if self.compress_type == "zip": @@ -796,7 +805,7 @@ def validate_md5(self, fname, expected=None): file_hash = hash_md5.hexdigest() if expected is None: - log.info("MD5 checksum for {}: {}".format(fname, file_hash)) + log.info("MD5 checksum for '{}': '{}'".format(fname, file_hash)) else: if file_hash == expected: log.debug("md5 sum of image matches expected: {}".format(expected)) diff --git a/nf_core/launch.py b/nf_core/launch.py index 78bca4aa23..8f131939dd 100644 --- a/nf_core/launch.py +++ b/nf_core/launch.py @@ -184,7 +184,9 @@ def get_pipeline_schema(self): try: release_tags = self.try_fetch_release_tags() self.pipeline_revision = questionary.select( - "Please select a release:", choices=release_tags + "Please select a release:", + choices=release_tags, + style=nf_core.utils.nfcore_question_style, ).ask() except LookupError: pass From 48f9146ba5c7dd85279fe262e573a22e004a834b Mon Sep 17 00:00:00 2001 From: Phil Ewels Date: Wed, 28 Apr 2021 19:34:09 +0200 Subject: [PATCH 136/210] Fast fail for non-existent repos --- nf_core/download.py | 26 +++++++++++++++++++++++--- 1 file changed, 23 insertions(+), 3 deletions(-) diff --git a/nf_core/download.py b/nf_core/download.py index 12f910636e..a7d10f9633 100644 --- a/nf_core/download.py +++ b/nf_core/download.py @@ -137,12 +137,12 @@ def _confirm_singularity_cache(self): def download_workflow(self): """Starts a nf-core workflow download.""" - self.prompt_inputs() - # Get workflow details try: + self.prompt_inputs() self.fetch_workflow_details() - except LookupError: + except LookupError as e: + log.critical(e) sys.exit(1) summary_log = [ @@ -223,6 +223,26 @@ def prompt_inputs(self): style=nf_core.utils.nfcore_question_style, ).ask() + # Fast-fail for unrecognised pipelines (we check again at the end) + for wf in self.wfs.remote_workflows: + if wf.full_name == self.pipeline or wf.name == self.pipeline: + break + else: + # Non nf-core GitHub repo + if self.pipeline.count("/") == 1: + gh_response = requests.get(f"https://api.github.com/repos/{self.pipeline}") + try: + assert gh_response.json()["message"] == "Not Found" + except AssertionError: + pass + else: + raise LookupError("Not able to find pipeline '{}'".format(self.pipeline)) + else: + log.info( + "Available nf-core pipelines: '{}'".format("', '".join([w.name for w in self.wfs.remote_workflows])) + ) + raise LookupError("Not able to find pipeline '{}'".format(self.pipeline)) # Prompt user for release tag if '--release' was not set if self.release is None:
release_tags = self.fetch_release_tags() From 0fc047713a2b4a82e8b229a9d50bf2f04aff32b9 Mon Sep 17 00:00:00 2001 From: Phil Ewels Date: Wed, 28 Apr 2021 19:37:33 +0200 Subject: [PATCH 137/210] More testing - bugfix --- nf_core/download.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/nf_core/download.py b/nf_core/download.py index a7d10f9633..496b0fbc04 100644 --- a/nf_core/download.py +++ b/nf_core/download.py @@ -191,7 +191,7 @@ def download_workflow(self): os.remove(self.output_filename) # Summary log - log.info("Saving {}\n {}".format(self.pipeline, "\n ".join(summary_log))) + log.info("Saving '{}'\n {}".format(self.pipeline, "\n ".join(summary_log))) # Download the pipeline files log.info("Downloading workflow files from GitHub") @@ -232,7 +232,7 @@ def prompt_inputs(self): if self.pipeline.count("/") == 1: gh_response = requests.get(f"https://api.github.com/repos/{self.pipeline}") try: - assert gh_response.json()["message"] == "Not Found" + assert gh_response.json().get("message") == "Not Found" except AssertionError: pass else: @@ -357,7 +357,7 @@ def fetch_workflow_details(self): if self.pipeline.count("/") == 1: # Looks like a GitHub address - try working with this repo log.warning("Pipeline name doesn't match any nf-core workflows") - log.info("Pipeline name looks like a GitHub address - attempting to download anyway") + log.info("Pipeline name looks like a GitHub address - attempting to download") self.wf_name = self.pipeline if not self.release: self.release = "master" From 59e06543e366ad0494b8d6ee61bada4a229b0909 Mon Sep 17 00:00:00 2001 From: Phil Ewels Date: Wed, 28 Apr 2021 19:49:24 +0200 Subject: [PATCH 138/210] Clean up + finish refactor for --container instead of --singularity --- nf_core/__main__.py | 4 ++-- nf_core/download.py | 45 ++++++++++++++++++++------------------------- 2 files changed, 22 insertions(+), 27 deletions(-) diff --git a/nf_core/__main__.py b/nf_core/__main__.py index 615e013c88..3fffbd24d2 100755 --- a/nf_core/__main__.py +++ b/nf_core/__main__.py @@ -215,10 +215,10 @@ def launch(pipeline, id, revision, command_only, params_in, params_out, save_all ) @click.option("-f", "--force", is_flag=True, default=False, help="Overwrite existing files") @click.option( - "-C", + "-i", "--container", type=click.Choice(["none", "singularity"]), - help="Download images", + help="Download software container images", ) @click.option( "-s", diff --git a/nf_core/download.py b/nf_core/download.py index 496b0fbc04..589b6c2abe 100644 --- a/nf_core/download.py +++ b/nf_core/download.py @@ -92,15 +92,9 @@ def __init__( self.compress_type = compress_type self.force = force self.container = container - self.singularity = container == "singularity" self.singularity_cache_only = singularity_cache_only self.parallel_downloads = parallel_downloads - # Sanity checks - if self.singularity_cache_only and not self.singularity: - log.error("Command has '--singularity-cache' set, but '--container' is 'none'") - sys.exit(1) - self.wf_name = None self.wf_sha = None self.wf_download_url = None @@ -125,11 +119,14 @@ def _confirm_compression(self): ).ask() def _confirm_container_download(self): - should_download = Confirm.ask(f"Should singularity image be downloaded?") - if should_download: - return "singularity" - else: - return "none" + return questionary.select( + "Download software container images:", + choices=[ + "none", + "singularity", + ], + style=nf_core.utils.nfcore_question_style, + ).ask() def _confirm_singularity_cache(self): return 
Confirm.ask(f"Should singularity image be cached?") @@ -145,11 +142,8 @@ def download_workflow(self): log.critical(e) sys.exit(1) - summary_log = [ - "Pipeline release: '{}'".format(self.release), - "Pull singularity containers: '{}'".format("Yes" if self.singularity else "No"), - ] - if self.singularity: + summary_log = [f"Pipeline release: '{self.release}'", f"Pull containers: '{self.container}'"] + if self.container == "singularity": export_in_file = os.popen('grep -c "export NXF_SINGULARITY_CACHEDIR" ~/.bashrc').read().strip("\n") != "0" if not export_in_file: append_to_file = Confirm.ask("Add 'export NXF_SINGULARITY_CACHEDIR' to .bashrc?") @@ -203,7 +197,7 @@ def download_workflow(self): self.wf_use_local_configs() # Download the singularity images - if self.singularity: + if self.container == "singularity": self.find_container_images() self.get_singularity_images() @@ -256,9 +250,14 @@ def prompt_inputs(self): self.container = self._confirm_container_download() # Use $NXF_SINGULARITY_CACHEDIR ? - if self.singularity_cache_only is None and self.singularity: + if self.singularity_cache_only is None and self.container == "singularity": self.singularity_cache_only = self._confirm_singularity_cache() + # Sanity checks (for cli flags) + if self.singularity_cache_only and self.container != "singularity": + log.error("Command has '--singularity-cache' set, but '--container' is 'none'") + sys.exit(1) + # Compress the downloaded files? if self.compress_type is None: self.compress_type = self._confirm_compression() @@ -427,7 +426,7 @@ def wf_use_local_configs(self): nfconfig = nfconfig.replace(find_str, repl_str) # Append the singularity.cacheDir to the end if we need it - if self.singularity and not self.singularity_cache_only: + if self.container == "singularity" and not self.singularity_cache_only: nfconfig += ( f"\n\n// Added by `nf-core download` v{nf_core.__version__} //\n" + 'singularity.cacheDir = "${projectDir}/../singularity-images/"' @@ -780,11 +779,7 @@ def compress_download(self): with tarfile.open(self.output_filename, "w:{}".format(ctype)) as tar: tar.add(self.outdir, arcname=os.path.basename(self.outdir)) tar_flags = "xzf" if ctype == "gz" else "xjf" - log.info( - "Command to extract files: [bright_magenta on grey0] tar -{} {} [/]".format( - tar_flags, self.output_filename - ) - ) + log.info(f"Command to extract files: [bright_magenta]tar -{tar_flags} {self.output_filename}[/]") # .zip files if self.compress_type == "zip": @@ -825,7 +820,7 @@ def validate_md5(self, fname, expected=None): file_hash = hash_md5.hexdigest() if expected is None: - log.info("MD5 checksum for '{}': '{}'".format(fname, file_hash)) + log.info("MD5 checksum for '{}': [blue]{}[/]".format(fname, file_hash)) else: if file_hash == expected: log.debug("md5 sum of image matches expected: {}".format(expected)) From f3d426f18c56d915e67b1cef988af7accd3a5581 Mon Sep 17 00:00:00 2001 From: kevinmenden Date: Thu, 29 Apr 2021 08:29:45 +0200 Subject: [PATCH 139/210] fixed lint tests --- tests/test_modules.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/test_modules.py b/tests/test_modules.py index adf6b54035..7eb3fbed7f 100644 --- a/tests/test_modules.py +++ b/tests/test_modules.py @@ -118,8 +118,8 @@ def test_modules_lint_new_modules(self): """ lint all modules in nf-core/modules repo clone """ module_lint = nf_core.modules.ModuleLint(dir=self.nfcore_modules) module_lint.lint(print_results=True, all_modules=True) - assert len(module_lint.passed) == 16 - assert len(module_lint.warned) == 
24 + assert len(module_lint.passed) == 19 + assert len(module_lint.warned) == 23 assert len(module_lint.failed) == 0 def test_modules_create_succeed(self): From 0556a8696dc94125d1c4dbd104aeaa05aaaf6392 Mon Sep 17 00:00:00 2001 From: kevinmenden Date: Thu, 29 Apr 2021 11:09:31 +0200 Subject: [PATCH 140/210] compare functions.nf to template --- nf_core/modules/lint.py | 19 +++++++++++++++++++ 1 file changed, 19 insertions(+) diff --git a/nf_core/modules/lint.py b/nf_core/modules/lint.py index 21b0deab6d..129f5778e4 100644 --- a/nf_core/modules/lint.py +++ b/nf_core/modules/lint.py @@ -832,6 +832,25 @@ def lint_functions_nf(self): if contains_all_functions: self.passed.append(("functions_nf_func_exist", "All functions present", self.function_nf)) + # Compare functions.nf file to the most recent template + # Get file content of the module functions.nf + try: + local_copy = open(self.function_nf, "r").read() + except FileNotFoundError as e: + log.error(f"Could not open {self.function_nf}") + + # Get the template file + template_copy_path = os.path.join(os.path.dirname(nf_core.__file__), "module-template/software/functions.nf") + try: + template_copy = open(template_copy_path, "r").read() + except FileNotFoundError as e: + log.error(f"Could not open {template_copy_path}") + + if local_copy != template_copy: + self.warned.append(("function_nf_comparison", "New version of functions.nf available", self.function_nf)) + else: + self.passed.append(("function_nf_comparison", "functions.nf is up to date", self.function_nf)) + def _parse_input(self, line): input = [] # more than one input From a7c8f0bb5f4ea3e6685649074640ce152e5e70ff Mon Sep 17 00:00:00 2001 From: kevinmenden Date: Thu, 29 Apr 2021 11:10:53 +0200 Subject: [PATCH 141/210] updated changelog --- CHANGELOG.md | 1 + nf_core/modules/lint.py | 1 + 2 files changed, 2 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index bfdb3942c1..ada9702873 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -15,6 +15,7 @@ * New `-k`/`--key` cli option for `nf-core lint` to allow you to run only named lint tests, for faster local debugging * Ignore permission errors for setting up requests cache directories to allow starting with an invalid or read-only HOME directory * New lint test to check if params in `nextflow config` are mentioned in `main.nf` [[#1038](https://github.com/nf-core/tools/issues/1038)] +* New modules lint test comparing the `functions.nf` file to the template version ### Template diff --git a/nf_core/modules/lint.py b/nf_core/modules/lint.py index 129f5778e4..fcce1abf25 100644 --- a/nf_core/modules/lint.py +++ b/nf_core/modules/lint.py @@ -846,6 +846,7 @@ def lint_functions_nf(self): except FileNotFoundError as e: log.error(f"Could not open {template_copy_path}") + # Compare the files if local_copy != template_copy: self.warned.append(("function_nf_comparison", "New version of functions.nf available", self.function_nf)) else: From 0337d036143f8218c85f7f57591e45e32c9d86cb Mon Sep 17 00:00:00 2001 From: kevinmenden Date: Thu, 29 Apr 2021 11:19:25 +0200 Subject: [PATCH 142/210] updated tests --- tests/test_modules.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/test_modules.py b/tests/test_modules.py index 7eb3fbed7f..eb70ad759d 100644 --- a/tests/test_modules.py +++ b/tests/test_modules.py @@ -102,7 +102,7 @@ def test_modules_lint_fastqc(self): self.mods.install("fastqc") module_lint = nf_core.modules.ModuleLint(dir=self.pipeline_dir) module_lint.lint(print_results=False, all_modules=True) - assert 
len(module_lint.passed) == 19 + assert len(module_lint.passed) == 20 assert len(module_lint.warned) == 0 assert len(module_lint.failed) == 0 @@ -118,7 +118,7 @@ def test_modules_lint_new_modules(self): """ lint all modules in nf-core/modules repo clone """ module_lint = nf_core.modules.ModuleLint(dir=self.nfcore_modules) module_lint.lint(print_results=True, all_modules=True) - assert len(module_lint.passed) == 19 + assert len(module_lint.passed) == 20 assert len(module_lint.warned) == 23 assert len(module_lint.failed) == 0 From bb6b6d2a16ab084f6a9a45e470a23a9c2eb4982f Mon Sep 17 00:00:00 2001 From: Phil Ewels Date: Thu, 29 Apr 2021 23:36:30 +0200 Subject: [PATCH 143/210] Fix Singularity installation check --- nf_core/download.py | 65 ++++++++++++++++++++------------------------- 1 file changed, 29 insertions(+), 36 deletions(-) diff --git a/nf_core/download.py b/nf_core/download.py index 589b6c2abe..03582c1d9b 100644 --- a/nf_core/download.py +++ b/nf_core/download.py @@ -129,7 +129,7 @@ def _confirm_container_download(self): ).ask() def _confirm_singularity_cache(self): - return Confirm.ask(f"Should singularity image be cached?") + return Confirm.ask(f"[blue bold]?[/] [white bold]Should singularity image be cached?[/]") def download_workflow(self): """Starts a nf-core workflow download.""" @@ -199,7 +199,11 @@ def download_workflow(self): # Download the singularity images if self.container == "singularity": self.find_container_images() - self.get_singularity_images() + try: + self.get_singularity_images() + except OSError as e: + log.critical(f"[red]{e}[/]") + sys.exit(1) # Compress into an archive if self.compress_type is not None: @@ -447,7 +451,7 @@ def find_container_images(self): `nextflow config` at the time of writing, so we scrape the pipeline files. 
""" - log.info("Fetching container names for workflow") + log.debug("Fetching container names for workflow") # Use linting code to parse the pipeline nextflow config self.nf_config = nf_core.utils.fetch_wf_config(os.path.join(self.outdir, "workflow")) @@ -490,11 +494,6 @@ def get_singularity_images(self): if len(self.containers) == 0: log.info("No container names found in workflow") else: - if not os.environ.get("NXF_SINGULARITY_CACHEDIR"): - log.info( - "[magenta]Tip: Set env var $NXF_SINGULARITY_CACHEDIR to use a central cache for container downloads" - ) - with DownloadProgress() as progress: task = progress.add_task("all_containers", total=len(self.containers), progress_type="summary") @@ -537,6 +536,10 @@ def get_singularity_images(self): # Pull using singularity containers_pull.append([container, out_path, cache_path]) + # Exit if we need to pull images and Singularity is not installed + if len(containers_pull) > 0 and shutil.which("singularity") is None: + raise OSError("Images need to be pulled from Docker, but Singularity is not installed") + # Go through each method of fetching containers in order for container in containers_exist: progress.update(task, description="Image file exists") @@ -739,35 +742,25 @@ def singularity_pull_image(self, container, out_path, cache_path, progress): # Progress bar to show that something is happening task = progress.add_task(container, start=False, total=False, progress_type="singularity_pull", current_log="") - # Try to use singularity to pull image - try: - # Run the singularity pull command - proc = subprocess.Popen( - singularity_command, - stdout=subprocess.PIPE, - stderr=subprocess.STDOUT, - universal_newlines=True, - bufsize=1, - ) - for line in proc.stdout: - log.debug(line.strip()) - progress.update(task, current_log=line.strip()) - - # Copy cached download if we are using the cache - if cache_path: - log.debug("Copying {} from cache: '{}'".format(container, os.path.basename(out_path))) - progress.update(task, current_log="Copying from cache to target directory") - shutil.copyfile(cache_path, out_path) - - progress.remove_task(task) + # Run the singularity pull command + proc = subprocess.Popen( + singularity_command, + stdout=subprocess.PIPE, + stderr=subprocess.STDOUT, + universal_newlines=True, + bufsize=1, + ) + for line in proc.stdout: + log.debug(line.strip()) + progress.update(task, current_log=line.strip()) + + # Copy cached download if we are using the cache + if cache_path: + log.debug("Copying {} from cache: '{}'".format(container, os.path.basename(out_path))) + progress.update(task, current_log="Copying from cache to target directory") + shutil.copyfile(cache_path, out_path) - except OSError as e: - if e.errno == errno.ENOENT: - # Singularity is not installed - log.error("Singularity is not installed!") - else: - # Something else went wrong with singularity command - raise e + progress.remove_task(task) def compress_download(self): """Take the downloaded files and make a compressed .tar.gz archive.""" From d9c72074d139171b939e90d54c26ce73a4d5fc9d Mon Sep 17 00:00:00 2001 From: Phil Ewels Date: Fri, 30 Apr 2021 00:19:19 +0200 Subject: [PATCH 144/210] Fix / rewrite code for singularity cachedir prompts + bashrc addition --- nf_core/download.py | 72 +++++++++++++++++++++++++++++++++------------ 1 file changed, 53 insertions(+), 19 deletions(-) diff --git a/nf_core/download.py b/nf_core/download.py index 03582c1d9b..9c97d4d385 100644 --- a/nf_core/download.py +++ b/nf_core/download.py @@ -128,9 +128,6 @@ def 
_confirm_container_download(self): style=nf_core.utils.nfcore_question_style, ).ask() - def _confirm_singularity_cache(self): - return Confirm.ask(f"[blue bold]?[/] [white bold]Should singularity image be cached?[/]") - def download_workflow(self): """Starts a nf-core workflow download.""" @@ -144,21 +141,9 @@ def download_workflow(self): summary_log = [f"Pipeline release: '{self.release}'", f"Pull containers: '{self.container}'"] if self.container == "singularity": - export_in_file = os.popen('grep -c "export NXF_SINGULARITY_CACHEDIR" ~/.bashrc').read().strip("\n") != "0" - if not export_in_file: - append_to_file = Confirm.ask("Add 'export NXF_SINGULARITY_CACHEDIR' to .bashrc?") - if append_to_file: - path = Prompt.ask("Specify the path: ") - try: - with open(os.path.expanduser("~/.bashrc"), "a") as f: - f.write(f"export NXF_SINGULARITY_CACHEDIR={path}\n") - log.info("Successfully wrote to ~/.bashrc") - except FileNotFoundError: - log.error("Unable to find ~/.bashrc") - sys.exit(1) if os.environ.get("NXF_SINGULARITY_CACHEDIR") is not None: summary_log.append( - "Using '$NXF_SINGULARITY_CACHEDIR': {}".format(os.environ["NXF_SINGULARITY_CACHEDIR"]) + "Using [blue]$NXF_SINGULARITY_CACHEDIR[/]': {}".format(os.environ["NXF_SINGULARITY_CACHEDIR"]) ) # Set an output filename now that we have the outdir @@ -254,12 +239,22 @@ def prompt_inputs(self): self.container = self._confirm_container_download() # Use $NXF_SINGULARITY_CACHEDIR ? - if self.singularity_cache_only is None and self.container == "singularity": - self.singularity_cache_only = self._confirm_singularity_cache() + if self.container == "singularity" and os.environ.get("NXF_SINGULARITY_CACHEDIR") is None: + self.set_nxf_singularity_cachedir() + + # Use *only* $NXF_SINGULARITY_CACHEDIR without copying into target? + if ( + self.singularity_cache_only is None + and self.container == "singularity" + and os.environ.get("NXF_SINGULARITY_CACHEDIR") is not None + ): + self.singularity_cache_only = Confirm.ask( + f"[blue bold]?[/] [white bold]Copy singularity images from [blue not bold]$NXF_SINGULARITY_CACHEDIR[/] to the download folder?[/]" + ) # Sanity checks (for cli flags) if self.singularity_cache_only and self.container != "singularity": - log.error("Command has '--singularity-cache' set, but '--container' is 'none'") + log.error("Command has '--singularity-cache' set, but '--container' is not 'singularity'") sys.exit(1) # Compress the downloaded files? 
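The bracketed tags in these prompt strings ([blue bold], [white bold] and so on) are rich console markup, rendered by rich's Confirm and Prompt helpers. A minimal standalone sketch of the pattern, outside the DownloadWorkflow class and with an illustrative question:

from rich.prompt import Confirm, Prompt

# Confirm.ask renders the markup and returns a bool
if Confirm.ask("[blue bold]?[/] [white bold]Define a shared Singularity cache folder?[/]"):
    # Prompt.ask returns whatever string the user enters
    cachedir = Prompt.ask("[blue bold]?[/] [white bold]Specify the path:[/]")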
@@ -270,6 +265,45 @@ def prompt_inputs(self): if self.compress_type == "none": self.compress_type = None + def set_nxf_singularity_cachedir(self): + """Ask if the user wants to set a Singularity cache""" + + if Confirm.ask( + f"[blue bold]?[/] [white bold]Define [blue not bold]$NXF_SINGULARITY_CACHEDIR[/] for a shared Singularity image download folder?[/]" + ): + cachedir_path = None + while cachedir_path is None: + cachedir_path = os.path.abspath( + Prompt.ask("[blue bold]?[/] [white bold]Specify the path:[/] (leave blank to cancel)") + ) + if cachedir_path == "": + cachedir_path = False + elif not os.path.isdir(cachedir_path): + log.error(f"'{cachedir_path}' is not a directory.") + cachedir_path = None + if cachedir_path: + os.environ["NXF_SINGULARITY_CACHEDIR"] = cachedir_path + + # Ask if user wants this set in their .bashrc + bashrc_path = os.path.expanduser("~/.bashrc") + if not os.path.isfile(bashrc_path): + bashrc_path = os.path.expanduser("~/.bash_profile") + if not os.path.isfile(bashrc_path): + bashrc_path = False + if bashrc_path: + append_to_file = Confirm.ask( + f"[blue bold]?[/] [white bold]Add [green not bold]'export NXF_SINGULARITY_CACHEDIR=\"{cachedir_path}\"'[/] to [blue not bold]~/{os.path.basename(bashrc_path)}[/] ?[/]" + ) + if append_to_file: + with open(os.path.expanduser(bashrc_path), "a") as f: + f.write( + "\n\n#######################################\n" + f"## Added by `nf-core download` v{nf_core.__version__} ##\n" + + f'export NXF_SINGULARITY_CACHEDIR="{cachedir_path}"' + + "\n#######################################\n" + ) + log.info(f"Successfully wrote to {bashrc_path}") + def fetch_release_tags(self): """Fetches tag names of pipeline releases from github From 1c921d680f9a863887712b437325d3a8a1d853b1 Mon Sep 17 00:00:00 2001 From: Phil Ewels Date: Fri, 30 Apr 2021 00:31:42 +0200 Subject: [PATCH 145/210] Restructure and reorganise prompts code --- nf_core/download.py | 220 +++++++++++++++++++++----------------------- 1 file changed, 104 insertions(+), 116 deletions(-) diff --git a/nf_core/download.py b/nf_core/download.py index 9c97d4d385..4066522563 100644 --- a/nf_core/download.py +++ b/nf_core/download.py @@ -106,34 +106,17 @@ def __init__( self.wfs.get_remote_workflows() self.wf_branches = {} - def _confirm_compression(self): - return questionary.select( - "Choose compression type:", - choices=[ - "none", - "tar.gz", - "tar.bz2", - "zip", - ], - style=nf_core.utils.nfcore_question_style, - ).ask() - - def _confirm_container_download(self): - return questionary.select( - "Download software container images:", - choices=[ - "none", - "singularity", - ], - style=nf_core.utils.nfcore_question_style, - ).ask() - def download_workflow(self): """Starts a nf-core workflow download.""" # Get workflow details try: - self.prompt_inputs() + self.prompt_pipeline_name() + self.prompt_release() + self.prompt_container_download() + self.prompt_use_singularity_cachedir() + self.prompt_singularity_cachedir_only() + self.prompt_compression_type() self.fetch_workflow_details() except LookupError as e: log.critical(e) @@ -195,10 +178,9 @@ def download_workflow(self): log.info("Compressing download..") self.compress_download() - def prompt_inputs(self): - """Interactively prompt the user for any missing flags""" + def prompt_pipeline_name(self): + """Prompt for the pipeline name if not set with a flag""" - # Prompt user if pipeline name was not specified if self.pipeline is None: self.pipeline = questionary.autocomplete( "Pipeline name:", @@ -226,23 +208,99 @@ def 
prompt_inputs(self): ) raise LookupError("Not able to find pipeline '{}'".format(self.pipeline)) + def prompt_release(self): + """Prompt for pipeline release / branch""" # Prompt user for release tag if '--release' was not set if self.release is None: - release_tags = self.fetch_release_tags() + release_tags = [] + + # We get releases from https://nf-co.re/pipelines.json + for wf in self.wfs.remote_workflows: + if wf.full_name == self.pipeline or wf.name == self.pipeline: + if len(wf.releases) > 0: + releases = sorted(wf.releases, key=lambda k: k.get("published_at_timestamp", 0), reverse=True) + release_tags = list(map(lambda release: release.get("tag_name"), releases)) + + try: + # Fetch branches from github api + branches_url = f"https://api.github.com/repos/nf-core/{self.pipeline}/branches" + branch_response = requests.get(branches_url) + + # Filter out the release tags and sort them + for branch in branch_response.json(): + self.wf_branches[branch["name"]] = branch["commit"]["sha"] + release_tags.extend( + [ + b + for b in self.wf_branches.keys() + if b != "TEMPLATE" and b != "initial_commit" and not b.startswith("nf-core-template-merge") + ] + ) + except TypeError: + # This will be picked up later if not a repo, just log for now + log.debug("Couldn't fetch branches - invalid repo?") + if len(release_tags) > 0: self.release = questionary.select( "Select release / branch:", choices=release_tags, style=nf_core.utils.nfcore_question_style ).ask() - # Download singularity container? + def prompt_container_download(self): + """Prompt whether to download container images or not""" + if self.container is None: - self.container = self._confirm_container_download() + self.container = questionary.select( + "Download software container images:", + choices=[ + "none", + "singularity", + ], + style=nf_core.utils.nfcore_question_style, + ).ask() - # Use $NXF_SINGULARITY_CACHEDIR ? 
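For reference, the branch lookup in prompt_release above assumes the GitHub REST API response shape: a JSON list of branch objects, each carrying a name and a commit object with the tip sha. A standalone sketch of that call, using nf-core/rnaseq purely as an example repository:

import requests

wf_branches = {}
response = requests.get("https://api.github.com/repos/nf-core/rnaseq/branches")
for branch in response.json():
    # each entry looks like {"name": ..., "commit": {"sha": ..., ...}, ...}
    wf_branches[branch["name"]] = branch["commit"]["sha"]

# Hide template housekeeping branches, as prompt_release does
selectable = [
    b
    for b in wf_branches
    if b not in ("TEMPLATE", "initial_commit") and not b.startswith("nf-core-template-merge")
]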
+ def prompt_use_singularity_cachedir(self): + """Prompt about using $NXF_SINGULARITY_CACHEDIR if not already set""" if self.container == "singularity" and os.environ.get("NXF_SINGULARITY_CACHEDIR") is None: - self.set_nxf_singularity_cachedir() + if Confirm.ask( + f"[blue bold]?[/] [white bold]Define [blue not bold]$NXF_SINGULARITY_CACHEDIR[/] for a shared Singularity image download folder?[/]" + ): + # Prompt user for a cache directory path + cachedir_path = None + while cachedir_path is None: + cachedir_path = os.path.abspath( + Prompt.ask("[blue bold]?[/] [white bold]Specify the path:[/] (leave blank to cancel)") + ) + if cachedir_path == os.path.abspath(""): + log.error(f"Not using [blue]$NXF_SINGULARITY_CACHEDIR[/]") + cachedir_path = False + elif not os.path.isdir(cachedir_path): + log.error(f"'{cachedir_path}' is not a directory.") + cachedir_path = None + if cachedir_path: + os.environ["NXF_SINGULARITY_CACHEDIR"] = cachedir_path + + # Ask if user wants this set in their .bashrc + bashrc_path = os.path.expanduser("~/.bashrc") + if not os.path.isfile(bashrc_path): + bashrc_path = os.path.expanduser("~/.bash_profile") + if not os.path.isfile(bashrc_path): + bashrc_path = False + if bashrc_path: + append_to_file = Confirm.ask( + f"[blue bold]?[/] [white bold]Add [green not bold]'export NXF_SINGULARITY_CACHEDIR=\"{cachedir_path}\"'[/] to [blue not bold]~/{os.path.basename(bashrc_path)}[/] ?[/]" + ) + if append_to_file: + with open(os.path.expanduser(bashrc_path), "a") as f: + f.write( + "\n\n#######################################\n" + f"## Added by `nf-core download` v{nf_core.__version__} ##\n" + + f'export NXF_SINGULARITY_CACHEDIR="{cachedir_path}"' + + "\n#######################################\n" + ) + log.info(f"Successfully wrote to {bashrc_path}") - # Use *only* $NXF_SINGULARITY_CACHEDIR without copying into target? + def prompt_singularity_cachedir_only(self): + """Ask if we should *only* use $NXF_SINGULARITY_CACHEDIR without copying into target""" if ( self.singularity_cache_only is None and self.container == "singularity" @@ -252,98 +310,28 @@ def prompt_inputs(self): f"[blue bold]?[/] [white bold]Copy singularity images from [blue not bold]$NXF_SINGULARITY_CACHEDIR[/] to the download folder?[/]" ) - # Sanity checks (for cli flags) + # Sanity check if self.singularity_cache_only and self.container != "singularity": - log.error("Command has '--singularity-cache' set, but '--container' is not 'singularity'") - sys.exit(1) + raise LookupError("Command has '--singularity-cache' set, but '--container' is not 'singularity'") - # Compress the downloaded files? 
+ def prompt_compression_type(self): + """Ask user if we should compress the downloaded files""" if self.compress_type is None: - self.compress_type = self._confirm_compression() + self.compress_type = questionary.select( + "Choose compression type:", + choices=[ + "none", + "tar.gz", + "tar.bz2", + "zip", + ], + style=nf_core.utils.nfcore_question_style, + ).ask() # Correct type for no-compression if self.compress_type == "none": self.compress_type = None - def set_nxf_singularity_cachedir(self): - """Ask if the user wants to set a Singularity cache""" - - if Confirm.ask( - f"[blue bold]?[/] [white bold]Define [blue not bold]$NXF_SINGULARITY_CACHEDIR[/] for a shared Singularity image download folder?[/]" - ): - cachedir_path = None - while cachedir_path is None: - cachedir_path = os.path.abspath( - Prompt.ask("[blue bold]?[/] [white bold]Specify the path:[/] (leave blank to cancel)") - ) - if cachedir_path == "": - cachedir_path = False - elif not os.path.isdir(cachedir_path): - log.error(f"'{cachedir_path}' is not a directory.") - cachedir_path = None - if cachedir_path: - os.environ["NXF_SINGULARITY_CACHEDIR"] = cachedir_path - - # Ask if user wants this set in their .bashrc - bashrc_path = os.path.expanduser("~/.bashrc") - if not os.path.isfile(bashrc_path): - bashrc_path = os.path.expanduser("~/.bash_profile") - if not os.path.isfile(bashrc_path): - bashrc_path = False - if bashrc_path: - append_to_file = Confirm.ask( - f"[blue bold]?[/] [white bold]Add [green not bold]'export NXF_SINGULARITY_CACHEDIR=\"{cachedir_path}\"'[/] to [blue not bold]~/{os.path.basename(bashrc_path)}[/] ?[/]" - ) - if append_to_file: - with open(os.path.expanduser(bashrc_path), "a") as f: - f.write( - "\n\n#######################################\n" - f"## Added by `nf-core download` v{nf_core.__version__} ##\n" - + f'export NXF_SINGULARITY_CACHEDIR="{cachedir_path}"' - + "\n#######################################\n" - ) - log.info(f"Successfully wrote to {bashrc_path}") - - def fetch_release_tags(self): - """Fetches tag names of pipeline releases from github - - Returns: - release_tags (list[str]): Returns list of release tags - - Raises: - LookupError, if no releases were found - """ - - release_tags = [] - - # We get releases from https://nf-co.re/pipelines.json - for wf in self.wfs.remote_workflows: - if wf.full_name == self.pipeline or wf.name == self.pipeline: - if len(wf.releases) > 0: - releases = sorted(wf.releases, key=lambda k: k.get("published_at_timestamp", 0), reverse=True) - release_tags = list(map(lambda release: release.get("tag_name"), releases)) - - try: - # Fetch branches from github api - branches_url = f"https://api.github.com/repos/nf-core/{self.pipeline}/branches" - branch_response = requests.get(branches_url) - - # Filter out the release tags and sort them - for branch in branch_response.json(): - self.wf_branches[branch["name"]] = branch["commit"]["sha"] - release_tags.extend( - [ - b - for b in self.wf_branches.keys() - if b != "TEMPLATE" and b != "initial_commit" and not b.startswith("nf-core-template-merge") - ] - ) - except TypeError: - # This will be picked up later if not a repo, just log for now - log.debug("Couldn't fetch branches - invalid repo?") - - return release_tags - def fetch_workflow_details(self): """Fetches details of a nf-core workflow to download. 
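[Editor's note] The patch above replaces one monolithic prompt_inputs() with a chain of single-purpose prompt_* methods, each of which only asks a question when the matching CLI flag was left unset. A minimal standalone sketch of that pattern, with a simplified class and made-up choices rather than the real nf-core code:

    # Sketch only: each prompt method is a no-op when the attribute was
    # already set from a CLI flag, so interactive and fully-flagged runs
    # share the same code path. Pipeline names here are illustrative.
    import questionary

    class Downloader:
        def __init__(self, pipeline=None, container=None):
            self.pipeline = pipeline
            self.container = container

        def prompt_pipeline_name(self):
            if self.pipeline is None:
                self.pipeline = questionary.autocomplete(
                    "Pipeline name:", choices=["rnaseq", "sarek", "methylseq"]
                ).ask()

        def prompt_container_download(self):
            if self.container is None:
                self.container = questionary.select(
                    "Download software container images:", choices=["none", "singularity"]
                ).ask()

    dl = Downloader(container="singularity")
    dl.prompt_pipeline_name()       # asks, because no flag was given
    dl.prompt_container_download()  # skipped, flag already set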
From 9f0fe810fde0b2e5bf956fc745e262b316810947 Mon Sep 17 00:00:00 2001
From: Phil Ewels
Date: Fri, 30 Apr 2021 00:39:48 +0200
Subject: [PATCH 146/210] Tweaks to log messages

---
 nf_core/download.py | 23 ++++++++++++-----------
 1 file changed, 12 insertions(+), 11 deletions(-)

diff --git a/nf_core/download.py b/nf_core/download.py
index 4066522563..f7239092b7 100644
--- a/nf_core/download.py
+++ b/nf_core/download.py
@@ -123,11 +123,10 @@ def download_workflow(self):
             sys.exit(1)

         summary_log = [f"Pipeline release: '{self.release}'", f"Pull containers: '{self.container}'"]
-        if self.container == "singularity":
-            if os.environ.get("NXF_SINGULARITY_CACHEDIR") is not None:
-                summary_log.append(
-                    "Using [blue]$NXF_SINGULARITY_CACHEDIR[/]': {}".format(os.environ["NXF_SINGULARITY_CACHEDIR"])
-                )
+        if self.container == "singularity" and os.environ.get("NXF_SINGULARITY_CACHEDIR") is not None:
+            summary_log.append(
+                "Using [blue]$NXF_SINGULARITY_CACHEDIR[/]': {}".format(os.environ["NXF_SINGULARITY_CACHEDIR"])
+            )

         # Set an output filename now that we have the outdir
         if self.compress_type is not None:
@@ -297,7 +296,10 @@ def prompt_use_singularity_cachedir(self):
                                 + f'export NXF_SINGULARITY_CACHEDIR="{cachedir_path}"'
                                 + "\n#######################################\n"
                             )
-                        log.info(f"Successfully wrote to {bashrc_path}")
+                        log.info(f"Successfully wrote to [blue]{bashrc_path}[/]")
+                        log.warning(
+                            "You will need to reload your terminal after the download completes for this to take effect."
+                        )

     def prompt_singularity_cachedir_only(self):
         """Ask if we should *only* use $NXF_SINGULARITY_CACHEDIR without copying into target"""
@@ -381,8 +383,7 @@ def fetch_workflow_details(self):
         # If we got this far, must not be a nf-core pipeline
         if self.pipeline.count("/") == 1:
             # Looks like a GitHub address - try working with this repo
-            log.warning("Pipeline name doesn't match any nf-core workflows")
-            log.info("Pipeline name looks like a GitHub address - attempting to download")
+            log.debug("Pipeline name looks like a GitHub address - attempting to download")
             self.wf_name = self.pipeline
             if not self.release:
                 self.release = "master"
@@ -560,7 +561,7 @@ def get_singularity_images(self):

         # Exit if we need to pull images and Singularity is not installed
         if len(containers_pull) > 0 and shutil.which("singularity") is None:
-            raise OSError("Images need to be pulled from Docker, but Singularity is not installed")
+            raise OSError("Singularity is needed to pull images, but it is not installed")

         # Go through each method of fetching containers in order
         for container in containers_exist:
@@ -806,10 +807,10 @@ def compress_download(self):
                         filePath = os.path.join(folderName, filename)
                         # Add file to zip
                         zipObj.write(filePath)
-        log.info("Command to extract files: unzip {}".format(self.output_filename))
+        log.info(f"Command to extract files: [bright_magenta]unzip {self.output_filename}[/]")

         # Delete original files
-        log.debug("Deleting uncompressed files: {}".format(self.outdir))
+        log.debug(f"Deleting uncompressed files: '{self.outdir}'")
         shutil.rmtree(self.outdir)

         # Caclualte md5sum for output file
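[Editor's note] The [blue]...[/] and [bright_magenta]...[/] tags in the log calls above are rich console markup; they only render when the logging handler has markup enabled. A minimal, self-contained setup for trying such messages outside nf-core (assumed wiring, mirroring the RichHandler that the nf-core CLI configures):

    # Sketch: route standard logging through rich so console markup in
    # log messages is rendered instead of printed literally.
    import logging
    from rich.logging import RichHandler

    logging.basicConfig(level="DEBUG", format="%(message)s", handlers=[RichHandler(markup=True)])
    log = logging.getLogger(__name__)

    log.info("Command to extract files: [bright_magenta]unzip download.zip[/]")
    log.debug("Deleting uncompressed files: 'nf-core-methylseq-1.6'")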
From 24adc00117f9df01eb050758e215334a8df23a25 Mon Sep 17 00:00:00 2001
From: Phil Ewels
Date: Fri, 30 Apr 2021 00:48:17 +0200
Subject: [PATCH 147/210] Use new rich console stderr argument, streamline
 imports

---
 nf_core/__main__.py     |  6 +++---
 nf_core/bump_version.py |  3 +--
 nf_core/download.py     | 29 ++++++++++++++---------------
 3 files changed, 18 insertions(+), 20 deletions(-)

diff --git a/nf_core/__main__.py b/nf_core/__main__.py
index 3fffbd24d2..d4a8daf38d 100755
--- a/nf_core/__main__.py
+++ b/nf_core/__main__.py
@@ -36,7 +36,7 @@ def run_nf_core():
     rich.traceback.install(width=200, word_wrap=True, extra_lines=1)

     # Print nf-core header to STDERR
-    stderr = rich.console.Console(file=sys.stderr, force_terminal=nf_core.utils.rich_force_colors())
+    stderr = rich.console.Console(stderr=True, force_terminal=nf_core.utils.rich_force_colors())
     stderr.print("\n[green]{},--.[grey39]/[green],-.".format(" " * 42), highlight=False)
     stderr.print("[blue] ___ __ __ __ ___ [green]/,-._.--~\\", highlight=False)
     stderr.print("[blue] |\ | |__ __ / ` / \ |__) |__ [yellow] } {", highlight=False)
@@ -116,7 +116,7 @@ def nf_core_cli(verbose, log_file):
     log.addHandler(
         rich.logging.RichHandler(
             level=logging.DEBUG if verbose else logging.INFO,
-            console=rich.console.Console(file=sys.stderr, force_terminal=nf_core.utils.rich_force_colors()),
+            console=rich.console.Console(stderr=True, force_terminal=nf_core.utils.rich_force_colors()),
             show_time=False,
             markup=True,
         )
@@ -263,7 +263,7 @@ def licences(pipeline, json):

 # nf-core create
 def validate_wf_name_prompt(ctx, opts, value):
-    """ Force the workflow name to meet the nf-core requirements """
+    """Force the workflow name to meet the nf-core requirements"""
     if not re.match(r"^[a-z]+$", value):
         click.echo("Invalid workflow name: must be lowercase without punctuation.")
         value = click.prompt(opts.prompt)
diff --git a/nf_core/bump_version.py b/nf_core/bump_version.py
index b770cb2e65..7788c9b552 100644
--- a/nf_core/bump_version.py
+++ b/nf_core/bump_version.py
@@ -3,7 +3,6 @@
 a nf-core pipeline.
 """

-import click
 import logging
 import os
 import re
@@ -12,7 +11,7 @@
 import nf_core.utils

 log = logging.getLogger(__name__)
-stderr = rich.console.Console(file=sys.stderr, force_terminal=nf_core.utils.rich_force_colors())
+stderr = rich.console.Console(stderr=True, force_terminal=nf_core.utils.rich_force_colors())


 def bump_pipeline_version(pipeline_obj, new_version):
diff --git a/nf_core/download.py b/nf_core/download.py
index f7239092b7..a32df4363e 100644
--- a/nf_core/download.py
+++ b/nf_core/download.py
@@ -3,7 +3,6 @@

 from __future__ import print_function

-import errno
 from io import BytesIO
 import logging
 import hashlib
@@ -17,18 +16,16 @@
 import sys
 import tarfile
 import concurrent.futures
-from rich.progress import BarColumn, DownloadColumn, TransferSpeedColumn, Progress
-from rich.prompt import Confirm, Prompt
+import rich
+import rich.progress
 from zipfile import ZipFile

 import nf_core
-import nf_core.list
-import nf_core.utils

 log = logging.getLogger(__name__)


-class DownloadProgress(Progress):
+class DownloadProgress(rich.progress.Progress):
     """Custom Progress bar class, allowing us to have two progress bars
     with different columns / layouts.
     """
@@ -38,7 +35,7 @@ def get_renderables(self):
         if task.fields.get("progress_type") == "summary":
             self.columns = (
                 "[magenta]{task.description}",
-                BarColumn(bar_width=None),
+                rich.progress.BarColumn(bar_width=None),
                 "[progress.percentage]{task.percentage:>3.0f}%",
                 "•",
                 "[green]{task.completed}/{task.total} completed",
             )
@@ -46,18 +43,18 @@ def get_renderables(self):
         if task.fields.get("progress_type") == "download":
             self.columns = (
                 "[blue]{task.description}",
-                BarColumn(bar_width=None),
+                rich.progress.BarColumn(bar_width=None),
                 "[progress.percentage]{task.percentage:>3.1f}%",
                 "•",
-                DownloadColumn(),
+                rich.progress.DownloadColumn(),
                 "•",
-                TransferSpeedColumn(),
+                rich.progress.TransferSpeedColumn(),
             )
         if task.fields.get("progress_type") == "singularity_pull":
             self.columns = (
                 "[magenta]{task.description}",
                 "[blue]{task.fields[current_log]}",
-                BarColumn(bar_width=None),
+                rich.progress.BarColumn(bar_width=None),
             )
         yield self.make_tasks_table([task])
@@ -260,14 +257,16 @@ def prompt_container_download(self):
     def prompt_use_singularity_cachedir(self):
         """Prompt about using $NXF_SINGULARITY_CACHEDIR if not already set"""
         if self.container == "singularity" and os.environ.get("NXF_SINGULARITY_CACHEDIR") is None:
-            if Confirm.ask(
+            if rich.prompt.Confirm.ask(
                 f"[blue bold]?[/] [white bold]Define [blue not bold]$NXF_SINGULARITY_CACHEDIR[/] for a shared Singularity image download folder?[/]"
             ):
                 # Prompt user for a cache directory path
                 cachedir_path = None
                 while cachedir_path is None:
                     cachedir_path = os.path.abspath(
-                        Prompt.ask("[blue bold]?[/] [white bold]Specify the path:[/] (leave blank to cancel)")
+                        rich.prompt.Prompt.ask(
+                            "[blue bold]?[/] [white bold]Specify the path:[/] (leave blank to cancel)"
+                        )
                     )
                     if cachedir_path == os.path.abspath(""):
                         log.error(f"Not using [blue]$NXF_SINGULARITY_CACHEDIR[/]")
@@ -285,7 +284,7 @@ def prompt_use_singularity_cachedir(self):
                     if not os.path.isfile(bashrc_path):
                         bashrc_path = False
                 if bashrc_path:
-                    append_to_file = Confirm.ask(
+                    append_to_file = rich.prompt.Confirm.ask(
                         f"[blue bold]?[/] [white bold]Add [green not bold]'export NXF_SINGULARITY_CACHEDIR=\"{cachedir_path}\"'[/] to [blue not bold]~/{os.path.basename(bashrc_path)}[/] ?[/]"
                     )
                     if append_to_file:
@@ -308,7 +307,7 @@ def prompt_singularity_cachedir_only(self):
             and self.container == "singularity"
             and os.environ.get("NXF_SINGULARITY_CACHEDIR") is not None
         ):
-            self.singularity_cache_only = Confirm.ask(
+            self.singularity_cache_only = rich.prompt.Confirm.ask(
                 f"[blue bold]?[/] [white bold]Copy singularity images from [blue not bold]$NXF_SINGULARITY_CACHEDIR[/] to the download folder?[/]"
             )

From fe77d55b8086ea2f7443f0c614252fc316c41b02 Mon Sep 17 00:00:00 2001
From: Phil Ewels
Date: Fri, 30 Apr 2021 01:01:35 +0200
Subject: [PATCH 148/210] Unsafe ask, colour select for releases / branches

---
 nf_core/download.py | 38 +++++++++++++++++++++++---------------
 1 file changed, 23 insertions(+), 15 deletions(-)

diff --git a/nf_core/download.py b/nf_core/download.py
index a32df4363e..7bd8b68bba 100644
--- a/nf_core/download.py
+++ b/nf_core/download.py
@@ -23,6 +23,7 @@ import nf_core

 log = logging.getLogger(__name__)
+stderr = rich.console.Console(stderr=True, highlight=False, force_terminal=nf_core.utils.rich_force_colors())


 class DownloadProgress(rich.progress.Progress):
@@ -178,11 +179,12 @@ def prompt_pipeline_name(self):
         """Prompt for the pipeline name if not set with a flag"""

         if self.pipeline is None:
+            stderr.print("Specify the name of a nf-core pipeline or a GitHub repository name (user/repo).")
             self.pipeline = questionary.autocomplete(
                 "Pipeline name:",
                 choices=[wf.name for wf in self.wfs.remote_workflows],
                 style=nf_core.utils.nfcore_question_style,
-            ).ask()
+            ).unsafe_ask()

         # Fast-fail for unrecognised pipelines (we check again at the end)
         for wf in self.wfs.remote_workflows:
@@ -208,14 +210,16 @@ def prompt_release(self):
         """Prompt for pipeline release / branch"""
         # Prompt user for release tag if '--release' was not set
         if self.release is None:
-            release_tags = []
+            choices = []

             # We get releases from https://nf-co.re/pipelines.json
             for wf in self.wfs.remote_workflows:
                 if wf.full_name == self.pipeline or wf.name == self.pipeline:
                     if len(wf.releases) > 0:
                         releases = sorted(wf.releases, key=lambda k: k.get("published_at_timestamp", 0), reverse=True)
-                        release_tags = list(map(lambda release: release.get("tag_name"), releases))
+                        for tag in map(lambda release: release.get("tag_name"), releases):
+                            tag_display = [("fg:ansiblue", f"{tag} "), ("class:choice-default", "[release]")]
+                            choices.append(questionary.Choice(title=tag_display, value=tag))

             try:
                 # Fetch branches from github api
@@ -225,21 +229,25 @@ def prompt_release(self):
                 # Filter out the release tags and sort them
                 for branch in branch_response.json():
                     self.wf_branches[branch["name"]] = branch["commit"]["sha"]
-                release_tags.extend(
-                    [
-                        b
-                        for b in self.wf_branches.keys()
-                        if b != "TEMPLATE" and b != "initial_commit" and not b.startswith("nf-core-template-merge")
-                    ]
-                )
+
+                for branch in self.wf_branches.keys():
+                    if (
+                        branch != "TEMPLATE"
+                        and branch != "initial_commit"
+                        and not branch.startswith("nf-core-template-merge")
+                    ):
+                        branch_display = [("fg:ansiyellow", f"{branch} "), ("class:choice-default", "[branch]")]
+                        choices.append(questionary.Choice(title=branch_display, value=branch))
+
             except TypeError:
                 # This will be picked up later if not a repo, just log for now
                 log.debug("Couldn't fetch branches - invalid repo?")

-            if len(release_tags) > 0:
+            if len(choices) > 0:
+                stderr.print("\nChoose the release or branch that should be downloaded.")
                 self.release = questionary.select(
-                    "Select release / branch:", choices=release_tags, style=nf_core.utils.nfcore_question_style
-                ).ask()
+                    "Select release / branch:", choices=choices, style=nf_core.utils.nfcore_question_style
+                ).unsafe_ask()

     def prompt_container_download(self):
         """Prompt whether to download container images or not"""
@@ -252,7 +260,7 @@ def prompt_container_download(self):
                     "singularity",
                 ],
                 style=nf_core.utils.nfcore_question_style,
-            ).ask()
+            ).unsafe_ask()

     def prompt_use_singularity_cachedir(self):
         """Prompt about using $NXF_SINGULARITY_CACHEDIR if not already set"""
@@ -327,7 +335,7 @@ def prompt_compression_type(self):
                     "zip",
                 ],
                 style=nf_core.utils.nfcore_question_style,
-            ).ask()
+            ).unsafe_ask()

         # Correct type for no-compression
         if self.compress_type == "none":
             self.compress_type = None
From 3c8006784833ed2a9c9c6155934b318d00f7e166 Mon Sep 17 00:00:00 2001
From: Phil Ewels
Date: Fri, 30 Apr 2021 01:16:21 +0200
Subject: [PATCH 149/210] Write some help text for prompts

---
 nf_core/download.py | 30 +++++++++++++++++++++++++++---
 1 file changed, 27 insertions(+), 3 deletions(-)

diff --git a/nf_core/download.py b/nf_core/download.py
index 7bd8b68bba..956018e267 100644
--- a/nf_core/download.py
+++ b/nf_core/download.py
@@ -23,7 +23,9 @@ import nf_core

 log = logging.getLogger(__name__)
-stderr = rich.console.Console(stderr=True, highlight=False, force_terminal=nf_core.utils.rich_force_colors())
+stderr = rich.console.Console(
+    stderr=True, style="dim", highlight=False, force_terminal=nf_core.utils.rich_force_colors()
+)


 class DownloadProgress(rich.progress.Progress):
@@ -253,6 +255,7 @@ def prompt_container_download(self):
         """Prompt whether to download container images or not"""

         if self.container is None:
+            stderr.print("\nIn addition to the pipeline code, this tool can download software containers.")
             self.container = questionary.select(
                 "Download software container images:",
                 choices=[
@@ -265,6 +268,10 @@ def prompt_container_download(self):
     def prompt_use_singularity_cachedir(self):
         """Prompt about using $NXF_SINGULARITY_CACHEDIR if not already set"""
         if self.container == "singularity" and os.environ.get("NXF_SINGULARITY_CACHEDIR") is None:
+            stderr.print(
+                "\nNextflow and nf-core can use an environment variable called [blue]$NXF_SINGULARITY_CACHEDIR[/] that is a path to a directory where remote Singularity images are stored. "
+                "This allows downloaded images to be cached in a central location."
+            )
             if rich.prompt.Confirm.ask(
                 f"[blue bold]?[/] [white bold]Define [blue not bold]$NXF_SINGULARITY_CACHEDIR[/] for a shared Singularity image download folder?[/]"
             ):
@@ -292,8 +299,13 @@ def prompt_use_singularity_cachedir(self):
                     if not os.path.isfile(bashrc_path):
                         bashrc_path = False
                 if bashrc_path:
+                    stderr.print(
+                        f"\nSo that [blue]$NXF_SINGULARITY_CACHEDIR[/] is always defined, you can add it to your [blue not bold]~/{os.path.basename(bashrc_path)}[/] file. "
+                        "This will then be automatically set every time you open a new terminal. We can add the following line to this file for you: \n"
+                        f'[blue]export NXF_SINGULARITY_CACHEDIR="{cachedir_path}"[/]'
+                    )
                     append_to_file = rich.prompt.Confirm.ask(
-                        f"[blue bold]?[/] [white bold]Add [green not bold]'export NXF_SINGULARITY_CACHEDIR=\"{cachedir_path}\"'[/] to [blue not bold]~/{os.path.basename(bashrc_path)}[/] ?[/]"
+                        f"[blue bold]?[/] [white bold]Add to [blue not bold]~/{os.path.basename(bashrc_path)}[/] ?[/]"
                    )
                     if append_to_file:
                         with open(os.path.expanduser(bashrc_path), "a") as f:
@@ -315,8 +327,13 @@ def prompt_singularity_cachedir_only(self):
             and self.container == "singularity"
             and os.environ.get("NXF_SINGULARITY_CACHEDIR") is not None
         ):
+            stderr.print(
+                "\nIf you are working on the same system where you will run Nextflow, you can leave the downloaded images in the "
+                "[blue not bold]$NXF_SINGULARITY_CACHEDIR[/] folder, Nextflow will automatically find them. "
+                "However if you will transfer the downloaded files to a different system then they should be copied to the target folder."
+            )
             self.singularity_cache_only = rich.prompt.Confirm.ask(
-                f"[blue bold]?[/] [white bold]Copy singularity images from [blue not bold]$NXF_SINGULARITY_CACHEDIR[/] to the download folder?[/]"
+                f"[blue bold]?[/] [white bold]Copy singularity images from [blue not bold]$NXF_SINGULARITY_CACHEDIR[/] to the target folder?[/]"
             )

         # Sanity check
@@ -326,6 +343,13 @@ def prompt_compression_type(self):
     def prompt_compression_type(self):
         """Ask user if we should compress the downloaded files"""
         if self.compress_type is None:
+            stderr.print(
+                "\nIf transferring the downloaded files to another system, it can be convenient to have everything compressed in a single file."
+            )
+            if self.container == "singularity":
+                stderr.print(
+                    "[bold]This is [italic]not[/] recommended when downloading Singularity images, as it can take a long time and saves very little space."
+                )
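[Editor's note] The pattern above pairs a dim, stderr-bound rich Console for explanatory help text with rich's Confirm prompt for the yes/no question itself, keeping stdout clean for pipeable output. A minimal hedged sketch of that wiring (the help string is abbreviated):

    # Sketch: help text goes to stderr in a dim style; the prompt follows it.
    import rich.console
    import rich.prompt

    stderr = rich.console.Console(stderr=True, style="dim", highlight=False)
    stderr.print("\nNextflow can use [blue]$NXF_SINGULARITY_CACHEDIR[/] as a shared image cache.")
    if rich.prompt.Confirm.ask("[blue bold]?[/] [bold]Define it now?[/]"):
        print("setting it up...")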
"initial_commit" - and not branch.startswith("nf-core-template-merge") - ): - branch_display = [("fg:ansiyellow", f"{branch} "), ("class:choice-default", "[branch]")] - choices.append(questionary.Choice(title=branch_display, value=branch)) - - except TypeError: - # This will be picked up later if not a repo, just log for now - log.debug("Couldn't fetch branches - invalid repo?") + # Branches + for branch in self.wf_branches.keys(): + branch_display = [("fg:ansiyellow", f"{branch} "), ("class:choice-default", "[branch]")] + choices.append(questionary.Choice(title=branch_display, value=branch)) if len(choices) > 0: stderr.print("\nChoose the release or branch that should be downloaded.") @@ -251,6 +235,38 @@ def prompt_release(self): "Select release / branch:", choices=choices, style=nf_core.utils.nfcore_question_style ).unsafe_ask() + def get_release_hash(self): + """Find specified release / branch hash""" + + # Branch + if self.release in self.wf_branches.keys(): + self.wf_sha = self.wf_branches[self.release] + + # Release + else: + for r in self.wf_releases: + if r["tag_name"] == self.release.lstrip("v"): + self.wf_sha = r["tag_sha"] + break + + # Can't find the release or branch - throw an error + else: + log.error("Not able to find release '{}' for {}".format(self.release, self.wf_name)) + log.info( + "Available {} releases: {}".format( + self.wf_name, ", ".join([r["tag_name"] for r in self.wf_releases]) + ) + ) + log.info("Available {} branches: '{}'".format(self.wf_name, "', '".join(self.wf_branches.keys()))) + raise LookupError("Not able to find release / branch '{}' for {}".format(self.release, self.wf_name)) + + # Set the outdir + if not self.outdir: + self.outdir = "{}-{}".format(self.wf_name.replace("/", "-").lower(), self.release) + + # Set the download URL and return + self.wf_download_url = "https://github.com/{}/archive/{}.zip".format(self.wf_name, self.wf_sha) + def prompt_container_download(self): """Prompt whether to download container images or not""" @@ -379,58 +395,47 @@ def fetch_workflow_details(self): # Set pipeline name self.wf_name = wf.name - # Find specified release / branch hash - if self.release is not None: + # Store releases + self.wf_releases = list( + sorted(wf.releases, key=lambda k: k.get("published_at_timestamp", 0), reverse=True) + ) - # Branch - if self.release in self.wf_branches.keys(): - self.wf_sha = self.wf_branches[self.release] + break - # Release - else: - for r in wf.releases: - if r["tag_name"] == self.release.lstrip("v"): - self.wf_sha = r["tag_sha"] - break - else: - log.error("Not able to find release '{}' for {}".format(self.release, wf.full_name)) - log.info( - "Available {} releases: {}".format( - wf.full_name, ", ".join([r["tag_name"] for r in wf.releases]) - ) - ) - raise LookupError("Not able to find release '{}' for {}".format(self.release, wf.full_name)) - - # Set outdir name if not defined - if not self.outdir: - self.outdir = "nf-core-{}".format(wf.name) - if self.release is not None: - self.outdir += "-{}".format(self.release) - - # Set the download URL and return - self.wf_download_url = "https://github.com/{}/archive/{}.zip".format(wf.full_name, self.wf_sha) - return - - # If we got this far, must not be a nf-core pipeline - if self.pipeline.count("/") == 1: - # Looks like a GitHub address - try working with this repo - log.debug("Pipeline name looks like a GitHub address - attempting to download") - self.wf_name = self.pipeline - if not self.release: - self.release = "master" - self.wf_sha = self.release - if not self.outdir: 
- self.outdir = self.pipeline.replace("/", "-").lower() - if self.release is not None: - self.outdir += "-{}".format(self.release) - # Set the download URL and return - self.wf_download_url = "https://github.com/{}/archive/{}.zip".format(self.pipeline, self.release) + # Must not be a nf-core pipeline else: - log.error("Not able to find pipeline '{}'".format(self.pipeline)) - log.info( - "Available nf-core pipelines: '{}'".format("', '".join([w.name for w in self.wfs.remote_workflows])) - ) - raise LookupError("Not able to find pipeline '{}'".format(self.pipeline)) + if self.pipeline.count("/") == 1: + + # Looks like a GitHub address - try working with this repo + self.wf_name = self.pipeline + log.info( + f"Pipeline '{self.wf_name}' not in nf-core, but looks like a GitHub address - attempting anyway" + ) + + # Get releases from GitHub API + releases_url = f"https://api.github.com/repos/{self.wf_name}/releases" + releases_response = requests.get(releases_url) + self.wf_releases = list( + sorted(releases_response.json(), key=lambda k: k.get("published_at_timestamp", 0), reverse=True) + ) + + else: + log.error("Not able to find pipeline '{}'".format(self.pipeline)) + log.info( + "Available nf-core pipelines: '{}'".format("', '".join([w.name for w in self.wfs.remote_workflows])) + ) + raise LookupError("Not able to find pipeline '{}'".format(self.pipeline)) + + # Get branch information from github api + branches_url = f"https://api.github.com/repos/{self.wf_name}/branches" + branch_response = requests.get(branches_url) + for branch in branch_response.json(): + if ( + branch["name"] != "TEMPLATE" + and branch["name"] != "initial_commit" + and not branch["name"].startswith("nf-core-template-merge") + ): + self.wf_branches[branch["name"]] = branch["commit"]["sha"] def download_wf_files(self): """Downloads workflow files from GitHub to the :attr:`self.outdir`.""" diff --git a/tests/test_download.py b/tests/test_download.py index eb14b3cf77..12fcfdfdfc 100644 --- a/tests/test_download.py +++ b/tests/test_download.py @@ -195,7 +195,11 @@ def test_download_workflow_with_success(self, mock_download_image): tmp_dir = tempfile.mkdtemp() download_obj = DownloadWorkflow( - pipeline="nf-core/methylseq", outdir=os.path.join(tmp_dir, "new"), singularity=True + pipeline="nf-core/methylseq", + outdir=os.path.join(tmp_dir, "new"), + container="singularity", + release="dev", + compress="none", ) download_obj.download_workflow() From 60c3730875e855792c33516cfbd1bd2fb39950e6 Mon Sep 17 00:00:00 2001 From: Phil Ewels Date: Fri, 30 Apr 2021 08:19:11 +0200 Subject: [PATCH 151/210] Pipeline name should be full_name --- nf_core/download.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nf_core/download.py b/nf_core/download.py index 83465f0c83..3dbcfbac25 100644 --- a/nf_core/download.py +++ b/nf_core/download.py @@ -393,7 +393,7 @@ def fetch_workflow_details(self): if wf.full_name == self.pipeline or wf.name == self.pipeline: # Set pipeline name - self.wf_name = wf.name + self.wf_name = wf.full_name # Store releases self.wf_releases = list( From 9c17495c170ccdac093f36ec684e834f0c57cb56 Mon Sep 17 00:00:00 2001 From: Phil Ewels Date: Fri, 30 Apr 2021 13:07:20 +0200 Subject: [PATCH 152/210] Update nf_core/launch.py --- nf_core/launch.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nf_core/launch.py b/nf_core/launch.py index 8f131939dd..a25baf80e8 100644 --- a/nf_core/launch.py +++ b/nf_core/launch.py @@ -187,7 +187,7 @@ def get_pipeline_schema(self): "Please select a release:", 
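[Editor's note] The refactor above centralises the GitHub REST API calls for branches. Outside of nf-core, the same lookup is a single requests call (GitHub API v3; the repo name below is an example and unauthenticated requests are rate-limited):

    # Sketch: list branch names and commit SHAs for a repository,
    # skipping the template/housekeeping branches the way the code above does.
    import requests

    repo = "nf-core/methylseq"
    response = requests.get(f"https://api.github.com/repos/{repo}/branches")
    branches = {
        b["name"]: b["commit"]["sha"]
        for b in response.json()
        if b["name"] != "TEMPLATE"
        and b["name"] != "initial_commit"
        and not b["name"].startswith("nf-core-template-merge")
    }
    print(branches)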
From 9c17495c170ccdac093f36ec684e834f0c57cb56 Mon Sep 17 00:00:00 2001
From: Phil Ewels
Date: Fri, 30 Apr 2021 13:07:20 +0200
Subject: [PATCH 152/210] Update nf_core/launch.py

---
 nf_core/launch.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/nf_core/launch.py b/nf_core/launch.py
index 8f131939dd..a25baf80e8 100644
--- a/nf_core/launch.py
+++ b/nf_core/launch.py
@@ -187,7 +187,7 @@ def get_pipeline_schema(self):
                         "Please select a release:",
                         choices=release_tags,
                         style=nf_core.utils.nfcore_question_style,
-                    ).ask()
+                    ).unsafe_ask()
                 except LookupError:
                     pass

From 3d0d4157dbda335bab40c0690a9b0873c570adf1 Mon Sep 17 00:00:00 2001
From: Phil Ewels
Date: Fri, 30 Apr 2021 23:14:54 +0200
Subject: [PATCH 153/210] Just bold, not white on bold. See nf-core/tools#1045

---
 nf_core/download.py | 10 ++++------
 1 file changed, 4 insertions(+), 6 deletions(-)

diff --git a/nf_core/download.py b/nf_core/download.py
index 3dbcfbac25..1f84975eb7 100644
--- a/nf_core/download.py
+++ b/nf_core/download.py
@@ -289,15 +289,13 @@ def prompt_use_singularity_cachedir(self):
                 "This allows downloaded images to be cached in a central location."
             )
             if rich.prompt.Confirm.ask(
-                f"[blue bold]?[/] [white bold]Define [blue not bold]$NXF_SINGULARITY_CACHEDIR[/] for a shared Singularity image download folder?[/]"
+                f"[blue bold]?[/] [bold]Define [blue not bold]$NXF_SINGULARITY_CACHEDIR[/] for a shared Singularity image download folder?[/]"
             ):
                 # Prompt user for a cache directory path
                 cachedir_path = None
                 while cachedir_path is None:
                     cachedir_path = os.path.abspath(
-                        rich.prompt.Prompt.ask(
-                            "[blue bold]?[/] [white bold]Specify the path:[/] (leave blank to cancel)"
-                        )
+                        rich.prompt.Prompt.ask("[blue bold]?[/] [bold]Specify the path:[/] (leave blank to cancel)")
                     )
                     if cachedir_path == os.path.abspath(""):
                         log.error(f"Not using [blue]$NXF_SINGULARITY_CACHEDIR[/]")
@@ -321,7 +319,7 @@ def prompt_use_singularity_cachedir(self):
                         f'[blue]export NXF_SINGULARITY_CACHEDIR="{cachedir_path}"[/]'
                     )
                     append_to_file = rich.prompt.Confirm.ask(
-                        f"[blue bold]?[/] [white bold]Add to [blue not bold]~/{os.path.basename(bashrc_path)}[/] ?[/]"
+                        f"[blue bold]?[/] [bold]Add to [blue not bold]~/{os.path.basename(bashrc_path)}[/] ?[/]"
                     )
                     if append_to_file:
                         with open(os.path.expanduser(bashrc_path), "a") as f:
@@ -349,7 +347,7 @@ def prompt_singularity_cachedir_only(self):
                 "However if you will transfer the downloaded files to a different system then they should be copied to the target folder."
             )
             self.singularity_cache_only = rich.prompt.Confirm.ask(
-                f"[blue bold]?[/] [white bold]Copy singularity images from [blue not bold]$NXF_SINGULARITY_CACHEDIR[/] to the target folder?[/]"
+                f"[blue bold]?[/] [bold]Copy singularity images from [blue not bold]$NXF_SINGULARITY_CACHEDIR[/] to the target folder?[/]"
             )

From 10ddb0189fd805591e99eb005a6e8e85edb5f507 Mon Sep 17 00:00:00 2001
From: Phil Ewels
Date: Fri, 30 Apr 2021 23:15:56 +0200
Subject: [PATCH 154/210] Removed more white bold, replaced with bold. Better
 on white colour scheme terminals. Fixes nf-core/tools#1045

---
 nf_core/schema.py | 20 ++++++++++----------
 1 file changed, 10 insertions(+), 10 deletions(-)

diff --git a/nf_core/schema.py b/nf_core/schema.py
index 5196bcd8fb..83f66a807a 100644
--- a/nf_core/schema.py
+++ b/nf_core/schema.py
@@ -28,7 +28,7 @@ class PipelineSchema(object):
     functions to handle pipeline JSON Schema"""

     def __init__(self):
-        """ Initialise the object """
+        """Initialise the object"""

         self.schema = None
         self.pipeline_dir = None
@@ -46,7 +46,7 @@ def __init__(self):
         self.web_schema_build_api_url = None

     def get_schema_path(self, path, local_only=False, revision=None):
-        """ Given a pipeline name, directory, or path, set self.schema_filename """
+        """Given a pipeline name, directory, or path, set self.schema_filename"""

         # Supplied path exists - assume a local pipeline directory or schema
         if os.path.exists(path):
@@ -75,7 +75,7 @@ def get_schema_path(self, path, local_only=False, revision=None):
             raise AssertionError(error)

     def load_lint_schema(self):
-        """ Load and lint a given schema to see if it looks valid """
+        """Load and lint a given schema to see if it looks valid"""
         try:
             self.load_schema()
             num_params = self.validate_schema()
@@ -92,7 +92,7 @@ def load_lint_schema(self):
             raise AssertionError(error_msg)

     def load_schema(self):
-        """ Load a pipeline schema from a file """
+        """Load a pipeline schema from a file"""
         with open(self.schema_filename, "r") as fh:
             self.schema = json.load(fh)
         self.schema_defaults = {}
@@ -153,7 +153,7 @@ def get_schema_defaults(self):
                     self.schema_defaults[p_key] = param["default"]

     def save_schema(self):
-        """ Save a pipeline schema to a file """
+        """Save a pipeline schema to a file"""
         # Write results to a JSON file
         num_params = len(self.schema.get("properties", {}))
         num_params += sum([len(d.get("properties", {})) for d in self.schema.get("definitions", {}).values()])
@@ -189,7 +189,7 @@ def load_input_params(self, params_path):
             raise AssertionError(error_msg)

     def validate_params(self):
-        """ Check given parameters against a schema and validate """
+        """Check given parameters against a schema and validate"""
         try:
             assert self.schema is not None
             jsonschema.validate(self.input_params, self.schema)
@@ -317,7 +317,7 @@ def validate_schema_title_description(self, schema=None):
             )

     def make_skeleton_schema(self):
-        """ Make a new pipeline schema from the template """
+        """Make a new pipeline schema from the template"""
         self.schema_from_scratch = True
         # Use Jinja to render the template schema file to a variable
         env = jinja2.Environment(
@@ -332,7 +332,7 @@ def make_skeleton_schema(self):
         self.get_schema_defaults()

     def build_schema(self, pipeline_dir, no_prompts, web_only, url):
-        """ Interactively build a new pipeline schema for a pipeline """
+        """Interactively build a new pipeline schema for a pipeline"""

         if no_prompts:
             self.no_prompts = True
@@ -476,7 +476,7 @@ def prompt_remove_schema_notfound_config(self, p_key):
         if self.no_prompts or self.schema_from_scratch:
             return True
         if Confirm.ask(
-            ":question: Unrecognised [white bold]'params.{}'[/] found in the schema but not in the pipeline config! [yellow]Remove it?".format(
+            ":question: Unrecognised [bold]'params.{}'[/] found in the schema but not in the pipeline config! [yellow]Remove it?".format(
                 p_key
             )
         ):
@@ -497,7 +497,7 @@ def add_schema_found_configs(self):
                 self.no_prompts
                 or self.schema_from_scratch
                 or Confirm.ask(
-                    ":sparkles: Found [white bold]'params.{}'[/] in the pipeline config, but not in the schema. [blue]Add to pipeline schema?".format(
+                    ":sparkles: Found [bold]'params.{}'[/] in the pipeline config, but not in the schema. [blue]Add to pipeline schema?".format(
                         p_key
                     )
                 )

From 06265eb6b943cb8dfc084daeda7b3166abf7fe2a Mon Sep 17 00:00:00 2001
From: Phil Ewels
Date: Fri, 30 Apr 2021 23:21:18 +0200
Subject: [PATCH 155/210] Awesome questionary path auto-completion for
 singularity cachedir path

---
 nf_core/download.py | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)

diff --git a/nf_core/download.py b/nf_core/download.py
index 1f84975eb7..c677151704 100644
--- a/nf_core/download.py
+++ b/nf_core/download.py
@@ -295,7 +295,9 @@ def prompt_use_singularity_cachedir(self):
                 cachedir_path = None
                 while cachedir_path is None:
                     cachedir_path = os.path.abspath(
-                        rich.prompt.Prompt.ask("[blue bold]?[/] [bold]Specify the path:[/] (leave blank to cancel)")
+                        questionary.path(
+                            "Specify the path:", only_directories=True, style=nf_core.utils.nfcore_question_style
+                        ).unsafe_ask()
                     )
                     if cachedir_path == os.path.abspath(""):
                         log.error(f"Not using [blue]$NXF_SINGULARITY_CACHEDIR[/]")

From c63487cb8083938fab1b729d1ab42a3be6d1430a Mon Sep 17 00:00:00 2001
From: Phil Ewels
Date: Fri, 30 Apr 2021 23:54:52 +0200
Subject: [PATCH 156/210] Refine cli flags, -c now for container instead of
 compress

---
 nf_core/__main__.py | 19 ++++++-------------
 nf_core/download.py |  4 ++--
 2 files changed, 8 insertions(+), 15 deletions(-)

diff --git a/nf_core/__main__.py b/nf_core/__main__.py
index 9a56bad95d..8630336ea8 100755
--- a/nf_core/__main__.py
+++ b/nf_core/__main__.py
@@ -208,25 +208,18 @@ def launch(pipeline, id, revision, command_only, params_in, params_out, save_all
 @click.option("-r", "--release", type=str, help="Pipeline release")
 @click.option("-o", "--outdir", type=str, help="Output directory")
 @click.option(
-    "-c",
-    "--compress",
-    type=click.Choice(["tar.gz", "tar.bz2", "zip", "none"]),
-    help="Archive compression type",
+    "-x", "--compress", type=click.Choice(["tar.gz", "tar.bz2", "zip", "none"]), help="Archive compression type"
 )
 @click.option("-f", "--force", is_flag=True, default=False, help="Overwrite existing files")
 @click.option(
-    "-i",
-    "--container",
-    type=click.Choice(["none", "singularity"]),
-    help="Download software container images",
+    "-c", "--container", type=click.Choice(["none", "singularity"]), help="Download software container images"
 )
 @click.option(
-    "-s/-x",
-    "--singularity-cache/--no-singularity-cache",
-    help="Do / don't copy images to the output directory and set 'singularity.cacheDir' in workflow",
+    "--singularity-cache-only/--singularity-cache-copy",
+    help="Don't / do copy images to the output directory and set 'singularity.cacheDir' in workflow",
 )
 @click.option("-p", "--parallel-downloads", type=int, default=4, help="Number of parallel image downloads")
-def download(pipeline, release, outdir, compress, force, container, singularity_cache, parallel_downloads):
+def download(pipeline, release, outdir, compress, force, container, singularity_cache_only, parallel_downloads):
     """
     Download a pipeline, nf-core/configs and pipeline singularity images.

     workflow to use relative paths to the configs and singularity images.
     """
     dl = nf_core.download.DownloadWorkflow(
-        pipeline, release, outdir, compress, force, container, singularity_cache, parallel_downloads
+        pipeline, release, outdir, compress, force, container, singularity_cache_only, parallel_downloads
     )
     dl.download_workflow()

diff --git a/nf_core/download.py b/nf_core/download.py
index c677151704..6e6d14ab0a 100644
--- a/nf_core/download.py
+++ b/nf_core/download.py
@@ -352,9 +352,9 @@ def prompt_singularity_cachedir_only(self):
                 f"[blue bold]?[/] [bold]Copy singularity images from [blue not bold]$NXF_SINGULARITY_CACHEDIR[/] to the target folder?[/]"
             )

-        # Sanity check
+        # Sanity check, for when passed as a cli flag
         if self.singularity_cache_only and self.container != "singularity":
-            raise LookupError("Command has '--singularity-cache' set, but '--container' is not 'singularity'")
+            raise LookupError("Command has '--singularity-cache-only' set, but '--container' is not 'singularity'")
From 0f2b8f4e1fa5d90d06a5204648aa24581c092400 Mon Sep 17 00:00:00 2001
From: Phil Ewels
Date: Fri, 30 Apr 2021 23:59:30 +0200
Subject: [PATCH 157/210] Launch - sort releases by release date

---
 nf_core/launch.py | 20 ++++++++++----------
 1 file changed, 10 insertions(+), 10 deletions(-)

diff --git a/nf_core/launch.py b/nf_core/launch.py
index a25baf80e8..216037477b 100644
--- a/nf_core/launch.py
+++ b/nf_core/launch.py
@@ -22,7 +22,7 @@


 class Launch(object):
-    """ Class to hold config option to launch a pipeline """
+    """Class to hold config option to launch a pipeline"""

     def __init__(
         self,
@@ -164,7 +164,7 @@ def launch_pipeline(self):
         self.launch_workflow()

     def get_pipeline_schema(self):
-        """ Load and validate the schema from the supplied pipeline """
+        """Load and validate the schema from the supplied pipeline"""

         # Set up the schema
         self.schema_obj = nf_core.schema.PipelineSchema()
@@ -246,7 +246,7 @@ def try_fetch_release_tags(self):
         if len(release_tags) == 0:
             log.error(f"Unable to find any release tags for {self.pipeline}. Will try to continue launch.")
             raise LookupError
-        release_tags = sorted(release_tags, key=lambda tag: tuple(tag.split(".")), reverse=True)
+        release_tags = sorted(release_tags, key=lambda tag: tag.get("published_at_timestamp", 0), reverse=True)
         return release_tags

     def set_schema_inputs(self):
@@ -265,7 +265,7 @@ def set_schema_inputs(self):
         self.schema_obj.validate_params()

     def merge_nxf_flag_schema(self):
-        """ Take the Nextflow flag schema and merge it with the pipeline schema """
+        """Take the Nextflow flag schema and merge it with the pipeline schema"""
         # Add the coreNextflow subschema to the schema definitions
         if "definitions" not in self.schema_obj.schema:
             self.schema_obj.schema["definitions"] = {}
@@ -277,7 +277,7 @@ def merge_nxf_flag_schema(self):
         self.schema_obj.schema["allOf"].insert(0, {"$ref": "#/definitions/coreNextflow"})

     def prompt_web_gui(self):
-        """ Ask whether to use the web-based or cli wizard to collect params """
+        """Ask whether to use the web-based or cli wizard to collect params"""
         log.info(
             "[magenta]Would you like to enter pipeline parameters using a web-based interface or a command-line wizard?"
         )
@@ -292,7 +292,7 @@ def prompt_web_gui(self):
         return answer["use_web_gui"] == "Web based"

     def launch_web_gui(self):
-        """ Send schema to nf-core website and launch input GUI """
+        """Send schema to nf-core website and launch input GUI"""

         content = {
             "post_content": "json_schema_launcher",
@@ -397,7 +397,7 @@ def sanitise_web_response(self):
                 params[param_id] = filter_func(params[param_id])

     def prompt_schema(self):
-        """ Go through the pipeline schema and prompt user to change defaults """
+        """Go through the pipeline schema and prompt user to change defaults"""
         answers = {}
         # Start with the subschema in the definitions - use order of allOf
         for allOf in self.schema_obj.schema.get("allOf", []):
@@ -659,7 +659,7 @@ def print_param_header(self, param_id, param_obj, is_group=False):
         console.print("(Use arrow keys)", style="italic", highlight=False)

     def strip_default_params(self):
-        """ Strip parameters if they have not changed from the default """
+        """Strip parameters if they have not changed from the default"""

         # Go through each supplied parameter (force list so we can delete in the loop)
         for param_id in list(self.schema_obj.input_params.keys()):
@@ -683,7 +683,7 @@ def strip_default_params(self):
                 del self.nxf_flags[param_id]

     def build_command(self):
-        """ Build the nextflow run command based on what we know """
+        """Build the nextflow run command based on what we know"""

         # Core nextflow options
         for flag, val in self.nxf_flags.items():
@@ -717,7 +717,7 @@ def build_command(self):
             self.nextflow_cmd += ' --{} "{}"'.format(param, str(val).replace('"', '\\"'))

     def launch_workflow(self):
-        """ Launch nextflow if required """
+        """Launch nextflow if required"""

         log.info("[bold underline]Nextflow command:[/]\n[magenta]{}\n\n".format(self.nextflow_cmd))
         if Confirm.ask("Do you want to run this command now? "):
From bb9dffee653cc4bb272df52e01cb1df993a0236a Mon Sep 17 00:00:00 2001
From: Phil Ewels
Date: Sat, 1 May 2021 00:00:57 +0200
Subject: [PATCH 158/210] Black - docstring quotes format change

---
 nf_core/__main__.py                 |  2 +-
 nf_core/create.py                   |  2 +-
 nf_core/launch.py                   | 18 +++----
 nf_core/lint/__init__.py            |  2 +-
 nf_core/list.py                     |  8 +--
 nf_core/modules/lint.py             | 10 ++--
 nf_core/modules/pipeline_modules.py |  2 +-
 nf_core/modules/test_yml_builder.py | 10 ++--
 nf_core/schema.py                   | 16 +++---
 nf_core/sync.py                     |  2 +-
 nf_core/utils.py                    |  4 +-
 tests/test_bump_version.py          |  4 +-
 tests/test_cli.py                   |  6 +--
 tests/test_launch.py                | 62 +++++++++++-----------
 tests/test_licenses.py              |  2 +-
 tests/test_list.py                  |  8 +--
 tests/test_modules.py               | 46 ++++++++---------
 tests/test_schema.py                | 80 ++++++++++++++---------------
 tests/test_sync.py                  | 34 ++++++------
 tests/test_utils.py                 |  2 +-
 20 files changed, 160 insertions(+), 160 deletions(-)

diff --git a/nf_core/__main__.py b/nf_core/__main__.py
index af29172882..55498a6670 100755
--- a/nf_core/__main__.py
+++ b/nf_core/__main__.py
@@ -257,7 +257,7 @@ def licences(pipeline, json):

 # nf-core create
 def validate_wf_name_prompt(ctx, opts, value):
-    """ Force the workflow name to meet the nf-core requirements """
+    """Force the workflow name to meet the nf-core requirements"""
     if not re.match(r"^[a-z]+$", value):
         click.echo("Invalid workflow name: must be lowercase without punctuation.")
         value = click.prompt(opts.prompt)
diff --git a/nf_core/create.py b/nf_core/create.py
index 85c60b7bc4..d7fa48dabe 100644
--- a/nf_core/create.py
+++ b/nf_core/create.py
@@ -47,7 +47,7 @@ def __init__(self, name, description, author, version="1.0dev", no_git=False, fo
         self.outdir = os.path.join(os.getcwd(), self.name_noslash)

     def init_pipeline(self):
-        """Creates the nf-core pipeline. """
+        """Creates the nf-core pipeline."""

         # Make the new pipeline
         self.render_template()
diff --git a/nf_core/launch.py b/nf_core/launch.py
index ce571f373c..02cbd9233b 100644
--- a/nf_core/launch.py
+++ b/nf_core/launch.py
@@ -21,7 +21,7 @@


 class Launch(object):
-    """ Class to hold config option to launch a pipeline """
+    """Class to hold config option to launch a pipeline"""

     def __init__(
         self,
@@ -163,7 +163,7 @@ def launch_pipeline(self):
         self.launch_workflow()

     def get_pipeline_schema(self):
-        """ Load and validate the schema from the supplied pipeline """
+        """Load and validate the schema from the supplied pipeline"""

         # Set up the schema
         self.schema_obj = nf_core.schema.PipelineSchema()
@@ -224,7 +224,7 @@ def set_schema_inputs(self):
         self.schema_obj.validate_params()

     def merge_nxf_flag_schema(self):
-        """ Take the Nextflow flag schema and merge it with the pipeline schema """
+        """Take the Nextflow flag schema and merge it with the pipeline schema"""
         # Add the coreNextflow subschema to the schema definitions
         if "definitions" not in self.schema_obj.schema:
             self.schema_obj.schema["definitions"] = {}
@@ -236,7 +236,7 @@ def merge_nxf_flag_schema(self):
         self.schema_obj.schema["allOf"].insert(0, {"$ref": "#/definitions/coreNextflow"})

     def prompt_web_gui(self):
-        """ Ask whether to use the web-based or cli wizard to collect params """
+        """Ask whether to use the web-based or cli wizard to collect params"""
         log.info(
             "[magenta]Would you like to enter pipeline parameters using a web-based interface or a command-line wizard?"
         )
@@ -251,7 +251,7 @@ def prompt_web_gui(self):
         return answer["use_web_gui"] == "Web based"

     def launch_web_gui(self):
-        """ Send schema to nf-core website and launch input GUI """
+        """Send schema to nf-core website and launch input GUI"""

         content = {
             "post_content": "json_schema_launcher",
@@ -356,7 +356,7 @@ def sanitise_web_response(self):
                 params[param_id] = filter_func(params[param_id])

     def prompt_schema(self):
-        """ Go through the pipeline schema and prompt user to change defaults """
+        """Go through the pipeline schema and prompt user to change defaults"""
         answers = {}
         # Start with the subschema in the definitions - use order of allOf
         for allOf in self.schema_obj.schema.get("allOf", []):
@@ -618,7 +618,7 @@ def print_param_header(self, param_id, param_obj, is_group=False):
         console.print("(Use arrow keys)", style="italic", highlight=False)

     def strip_default_params(self):
-        """ Strip parameters if they have not changed from the default """
+        """Strip parameters if they have not changed from the default"""

         # Go through each supplied parameter (force list so we can delete in the loop)
         for param_id in list(self.schema_obj.input_params.keys()):
@@ -642,7 +642,7 @@ def strip_default_params(self):
                 del self.nxf_flags[param_id]

     def build_command(self):
-        """ Build the nextflow run command based on what we know """
+        """Build the nextflow run command based on what we know"""

         # Core nextflow options
         for flag, val in self.nxf_flags.items():
@@ -676,7 +676,7 @@ def build_command(self):
             self.nextflow_cmd += ' --{} "{}"'.format(param, str(val).replace('"', '\\"'))

     def launch_workflow(self):
-        """ Launch nextflow if required """
+        """Launch nextflow if required"""

         log.info("[bold underline]Nextflow command:[/]\n[magenta]{}\n\n".format(self.nextflow_cmd))
         if Confirm.ask("Do you want to run this command now? "):
diff --git a/nf_core/lint/__init__.py b/nf_core/lint/__init__.py
index 82c4d57ddd..a256fab6ba 100644
--- a/nf_core/lint/__init__.py
+++ b/nf_core/lint/__init__.py
@@ -117,7 +117,7 @@ class PipelineLint(nf_core.utils.Pipeline):
     from .version_consistency import version_consistency

     def __init__(self, wf_path, release_mode=False, fix=(), key=(), fail_ignored=False):
-        """ Initialise linting object """
+        """Initialise linting object"""

         # Initialise the parent object
         super().__init__(wf_path)
diff --git a/nf_core/list.py b/nf_core/list.py
index 3278d8e1ff..6cd64acb48 100644
--- a/nf_core/list.py
+++ b/nf_core/list.py
@@ -263,7 +263,7 @@ def sort_pulled_date(wf):
         return table

     def print_json(self):
-        """ Dump JSON of all parsed information """
+        """Dump JSON of all parsed information"""
         return json.dumps(
             {"local_workflows": self.local_workflows, "remote_workflows": self.remote_workflows},
             default=lambda o: o.__dict__,
@@ -308,10 +308,10 @@ def __init__(self, data):


 class LocalWorkflow(object):
-    """ Class to handle local workflows pulled by nextflow """
+    """Class to handle local workflows pulled by nextflow"""

     def __init__(self, name):
-        """ Initialise the LocalWorkflow object """
+        """Initialise the LocalWorkflow object"""
         self.full_name = name
         self.repository = None
         self.local_path = None
@@ -324,7 +324,7 @@ def __init__(self, name):
         self.last_pull_pretty = None

     def get_local_nf_workflow_details(self):
-        """ Get full details about a local cached workflow """
+        """Get full details about a local cached workflow"""

         if self.local_path is None:

diff --git a/nf_core/modules/lint.py b/nf_core/modules/lint.py
index fcce1abf25..6f5d642b51 100644
--- a/nf_core/modules/lint.py
+++ b/nf_core/modules/lint.py
@@ -37,7 +37,7 @@ class ModuleLintException(Exception):


 class LintResult(object):
-    """ An object to hold the results of a lint test """
+    """An object to hold the results of a lint test"""

     def __init__(self, mod, lint_test, message, file_path):
         self.mod = mod
@@ -476,7 +476,7 @@ def __init__(self, module_dir, repo_type, base_dir, nf_core_module=True):
             self.module_name = module_dir.split("software" + os.sep)[1]

     def lint(self):
-        """ Perform linting on this module """
+        """Perform linting on this module"""

         # Iterate over modules and run all checks on them
         # Lint the main.nf file
@@ -501,7 +501,7 @@ def lint(self):
         return self.passed, self.warned, self.failed

     def lint_module_tests(self):
-        """ Lint module tests """
+        """Lint module tests"""

         if os.path.exists(self.test_dir):
             self.passed.append(("test_dir_exists", "Test directory exists", self.test_dir))
@@ -552,7 +552,7 @@ def lint_module_tests(self):
             self.failed.append(("test_yml_exists", "Test `test.yml` does not exist", self.test_yml))

     def lint_meta_yml(self):
-        """ Lint a meta yml file """
+        """Lint a meta yml file"""
         required_keys = ["name", "input", "output"]
         required_keys_lists = ["intput", "output"]
         try:
@@ -883,7 +883,7 @@ def _parse_output(self, line):
         return output

     def _is_empty(self, line):
-        """ Check whether a line is empty or a comment """
+        """Check whether a line is empty or a comment"""
         empty = False
         if line.strip().startswith("//"):
             empty = True
diff --git a/nf_core/modules/pipeline_modules.py b/nf_core/modules/pipeline_modules.py
index 16774e01ba..719131d037 100644
--- a/nf_core/modules/pipeline_modules.py
+++ b/nf_core/modules/pipeline_modules.py
@@ -289,7 +289,7 @@ def remove(self, module):
             return False

     def get_pipeline_modules(self):
-        """ Get list of modules installed in the current pipeline """
+        """Get list of modules installed in the current pipeline"""
         self.pipeline_module_names = []
         module_mains = glob.glob(f"{self.pipeline_dir}/modules/nf-core/software/**/main.nf", recursive=True)
         for mod in module_mains:
diff --git a/nf_core/modules/test_yml_builder.py b/nf_core/modules/test_yml_builder.py
index 9bee3a614c..863f4ce416 100644
--- a/nf_core/modules/test_yml_builder.py
+++ b/nf_core/modules/test_yml_builder.py
@@ -47,7 +47,7 @@ def __init__(
         self.tests = []

     def run(self):
-        """ Run build steps """
+        """Run build steps"""
         if not self.no_prompts:
             log.info(
                 "[yellow]Press enter to use default values [cyan bold](shown in brackets) [yellow]or type your own responses"
@@ -58,7 +58,7 @@ def run(self):
         self.print_test_yml()

     def check_inputs(self):
-        """ Do more complex checks about supplied flags. """
+        """Do more complex checks about supplied flags."""

         # Get the tool name if not specified
         if self.module_name is None:
@@ -113,7 +113,7 @@ def check_inputs(self):
             )

     def scrape_workflow_entry_points(self):
-        """ Find the test workflow entry points from main.nf """
+        """Find the test workflow entry points from main.nf"""
         log.info(f"Looking for test workflow entry points: '{self.module_test_main}'")
         with open(self.module_test_main, "r") as fh:
             for line in fh:
@@ -196,7 +196,7 @@ def _md5(self, fname):
         return md5sum

     def create_test_file_dict(self, results_dir):
-        """ Walk through directory and collect md5 sums """
+        """Walk through directory and collect md5 sums"""
         test_files = []
         for root, dir, file in os.walk(results_dir):
             for elem in file:
@@ -249,7 +249,7 @@ def get_md5_sums(self, entry_point, command, results_dir=None, results_dir_repea
         return test_files

     def run_tests_workflow(self, command):
-        """ Given a test workflow and an entry point, run the test workflow """
+        """Given a test workflow and an entry point, run the test workflow"""

         # The config expects $PROFILE and Nextflow fails if it's not set
         if os.environ.get("PROFILE") is None:
diff --git a/nf_core/schema.py b/nf_core/schema.py
index 5196bcd8fb..51eea138e1 100644
--- a/nf_core/schema.py
+++ b/nf_core/schema.py
@@ -28,7 +28,7 @@ class PipelineSchema(object):
     functions to handle pipeline JSON Schema"""

     def __init__(self):
-        """ Initialise the object """
+        """Initialise the object"""

         self.schema = None
         self.pipeline_dir = None
@@ -46,7 +46,7 @@ def __init__(self):
         self.web_schema_build_api_url = None

     def get_schema_path(self, path, local_only=False, revision=None):
-        """ Given a pipeline name, directory, or path, set self.schema_filename """
+        """Given a pipeline name, directory, or path, set self.schema_filename"""

         # Supplied path exists - assume a local pipeline directory or schema
         if os.path.exists(path):
@@ -75,7 +75,7 @@ def get_schema_path(self, path, local_only=False, revision=None):
             raise AssertionError(error)

     def load_lint_schema(self):
-        """ Load and lint a given schema to see if it looks valid """
+        """Load and lint a given schema to see if it looks valid"""
         try:
             self.load_schema()
             num_params = self.validate_schema()
@@ -92,7 +92,7 @@ def load_lint_schema(self):
             raise AssertionError(error_msg)

     def load_schema(self):
-        """ Load a pipeline schema from a file """
+        """Load a pipeline schema from a file"""
         with open(self.schema_filename, "r") as fh:
             self.schema = json.load(fh)
         self.schema_defaults = {}
@@ -153,7 +153,7 @@ def get_schema_defaults(self):
                     self.schema_defaults[p_key] = param["default"]

     def save_schema(self):
-        """ Save a pipeline schema to a file """
+        """Save a pipeline schema to a file"""
         # Write results to a JSON file
         num_params = len(self.schema.get("properties", {}))
         num_params += sum([len(d.get("properties", {})) for d in self.schema.get("definitions", {}).values()])
@@ -189,7 +189,7 @@ def load_input_params(self, params_path):
             raise AssertionError(error_msg)

     def validate_params(self):
-        """ Check given parameters against a schema and validate """
+        """Check given parameters against a schema and validate"""
         try:
             assert self.schema is not None
             jsonschema.validate(self.input_params, self.schema)
@@ -317,7 +317,7 @@ def validate_schema_title_description(self, schema=None):
             )

     def make_skeleton_schema(self):
-        """ Make a new pipeline schema from the template """
+        """Make a new pipeline schema from the template"""
         self.schema_from_scratch = True
         # Use Jinja to render the template schema file to a variable
         env = jinja2.Environment(
@@ -332,7 +332,7 @@ def make_skeleton_schema(self):
         self.get_schema_defaults()

     def build_schema(self, pipeline_dir, no_prompts, web_only, url):
-        """ Interactively build a new pipeline schema for a pipeline """
+        """Interactively build a new pipeline schema for a pipeline"""

         if no_prompts:
             self.no_prompts = True
diff --git a/nf_core/sync.py b/nf_core/sync.py
index f198f90ca2..6b7c8168f7 100644
--- a/nf_core/sync.py
+++ b/nf_core/sync.py
@@ -63,7 +63,7 @@ def __init__(
         gh_repo=None,
         gh_username=None,
     ):
-        """ Initialise syncing object """
+        """Initialise syncing object"""

         self.pipeline_dir = os.path.abspath(pipeline_dir)
         self.from_branch = from_branch
diff --git a/nf_core/utils.py b/nf_core/utils.py
index 18f2dcb581..2670f0310d 100644
--- a/nf_core/utils.py
+++ b/nf_core/utils.py
@@ -108,7 +108,7 @@ class Pipeline(object):
     """

     def __init__(self, wf_path):
-        """ Initialise pipeline object """
+        """Initialise pipeline object"""
         self.conda_config = {}
         self.conda_package_info = {}
         self.nf_config = {}
@@ -521,7 +521,7 @@ def get_tag_date(tag_date):


 def custom_yaml_dumper():
-    """ Overwrite default PyYAML output to make Prettier YAML linting happy """
+    """Overwrite default PyYAML output to make Prettier YAML linting happy"""

     class CustomDumper(yaml.Dumper):
         def represent_dict_preserve_order(self, data):
diff --git a/tests/test_bump_version.py b/tests/test_bump_version.py
index 74e9dfddf0..9ced58b8e2 100644
--- a/tests/test_bump_version.py
+++ b/tests/test_bump_version.py
@@ -11,7 +11,7 @@


 def test_bump_pipeline_version(datafiles):
-    """ Test that making a release with the working example files works """
+    """Test that making a release with the working example files works"""
     # Get a workflow and configs
     test_pipeline_dir = os.path.join(tempfile.mkdtemp(), "nf-core-testpipeline")
     create_obj = nf_core.create.PipelineCreate(
@@ -49,7 +49,7 @@ def test_bump_pipeline_version(datafiles):


 def test_dev_bump_pipeline_version(datafiles):
-    """ Test that making a release works with a dev name and a leading v """
+    """Test that making a release works with a dev name and a leading v"""
     # Get a workflow and configs
     test_pipeline_dir = os.path.join(tempfile.mkdtemp(), "nf-core-testpipeline")
     create_obj = nf_core.create.PipelineCreate(
diff --git a/tests/test_cli.py b/tests/test_cli.py
index eb1ab6f9df..474314b8eb 100644
--- a/tests/test_cli.py
+++ b/tests/test_cli.py
@@ -11,12 +11,12 @@

 @mock.patch("nf_core.__main__.nf_core_cli")
 def test_header(mock_cli):
-    """ Just try to execute the header function """
+    """Just try to execute the header function"""
     nf_core.__main__.run_nf_core()


 def test_cli_help():
-    """ Test the main launch function with --help """
+    """Test the main launch function with --help"""
    runner = 
CliRunner() result = runner.invoke(nf_core.__main__.nf_core_cli, ["--help"]) assert result.exit_code == 0 @@ -24,7 +24,7 @@ def test_cli_help(): def test_cli_bad_subcommand(): - """ Test the main launch function with verbose flag and an unrecognised argument """ + """Test the main launch function with verbose flag and an unrecognised argument""" runner = CliRunner() result = runner.invoke(nf_core.__main__.nf_core_cli, ["-v", "foo"]) assert result.exit_code == 2 diff --git a/tests/test_launch.py b/tests/test_launch.py index e592d56363..560619a689 100644 --- a/tests/test_launch.py +++ b/tests/test_launch.py @@ -16,7 +16,7 @@ class TestLaunch(unittest.TestCase): """Class for launch tests""" def setUp(self): - """ Create a new PipelineSchema and Launch objects """ + """Create a new PipelineSchema and Launch objects""" # Set up the schema root_repo_dir = os.path.dirname(os.path.dirname(os.path.realpath(__file__))) self.template_dir = os.path.join(root_repo_dir, "nf_core", "pipeline-template") @@ -26,12 +26,12 @@ def setUp(self): @mock.patch.object(nf_core.launch.Launch, "prompt_web_gui", side_effect=[True]) @mock.patch.object(nf_core.launch.Launch, "launch_web_gui") def test_launch_pipeline(self, mock_webbrowser, mock_lauch_web_gui): - """ Test the main launch function """ + """Test the main launch function""" self.launcher.launch_pipeline() @mock.patch.object(nf_core.launch.Confirm, "ask", side_effect=[False]) def test_launch_file_exists(self, mock_confirm): - """ Test that we detect an existing params file and return """ + """Test that we detect an existing params file and return""" # Make an empty params file to be overwritten open(self.nf_params_fn, "a").close() # Try and to launch, return with error @@ -41,19 +41,19 @@ def test_launch_file_exists(self, mock_confirm): @mock.patch.object(nf_core.launch.Launch, "launch_web_gui") @mock.patch.object(nf_core.launch.Confirm, "ask", side_effect=[False]) def test_launch_file_exists_overwrite(self, mock_webbrowser, mock_lauch_web_gui, mock_confirm): - """ Test that we detect an existing params file and we overwrite it """ + """Test that we detect an existing params file and we overwrite it""" # Make an empty params file to be overwritten open(self.nf_params_fn, "a").close() # Try and to launch, return with error self.launcher.launch_pipeline() def test_get_pipeline_schema(self): - """ Test loading the params schema from a pipeline """ + """Test loading the params schema from a pipeline""" self.launcher.get_pipeline_schema() assert len(self.launcher.schema_obj.schema["definitions"]["input_output_options"]["properties"]) > 2 def test_make_pipeline_schema(self): - """ Make a copy of the template workflow, but delete the schema file, then try to load it """ + """Make a copy of the template workflow, but delete the schema file, then try to load it""" test_pipeline_dir = os.path.join(tempfile.mkdtemp(), "wf") shutil.copytree(self.template_dir, test_pipeline_dir) os.remove(os.path.join(test_pipeline_dir, "nextflow_schema.json")) @@ -68,14 +68,14 @@ def test_make_pipeline_schema(self): } def test_get_pipeline_defaults(self): - """ Test fetching default inputs from the pipeline schema """ + """Test fetching default inputs from the pipeline schema""" self.launcher.get_pipeline_schema() self.launcher.set_schema_inputs() assert len(self.launcher.schema_obj.input_params) > 0 assert self.launcher.schema_obj.input_params["outdir"] == "./results" def test_get_pipeline_defaults_input_params(self): - """ Test fetching default inputs from the pipeline schema with an 
input params file supplied """ + """Test fetching default inputs from the pipeline schema with an input params file supplied""" tmp_filehandle, tmp_filename = tempfile.mkstemp() with os.fdopen(tmp_filehandle, "w") as fh: json.dump({"outdir": "fubar"}, fh) @@ -86,7 +86,7 @@ def test_get_pipeline_defaults_input_params(self): assert self.launcher.schema_obj.input_params["outdir"] == "fubar" def test_nf_merge_schema(self): - """ Checking merging the nextflow schema with the pipeline schema """ + """Checking merging the nextflow schema with the pipeline schema""" self.launcher.get_pipeline_schema() self.launcher.set_schema_inputs() self.launcher.merge_nxf_flag_schema() @@ -94,7 +94,7 @@ def test_nf_merge_schema(self): assert "-resume" in self.launcher.schema_obj.schema["definitions"]["coreNextflow"]["properties"] def test_ob_to_questionary_string(self): - """ Check converting a python dict to a pyenquirer format - simple strings """ + """Check converting a python dict to a pyenquirer format - simple strings""" sc_obj = { "type": "string", "default": "data/*{1,2}.fastq.gz", @@ -104,17 +104,17 @@ def test_ob_to_questionary_string(self): @mock.patch("questionary.unsafe_prompt", side_effect=[{"use_web_gui": "Web based"}]) def test_prompt_web_gui_true(self, mock_prompt): - """ Check the prompt to launch the web schema or use the cli """ + """Check the prompt to launch the web schema or use the cli""" assert self.launcher.prompt_web_gui() == True @mock.patch("questionary.unsafe_prompt", side_effect=[{"use_web_gui": "Command line"}]) def test_prompt_web_gui_false(self, mock_prompt): - """ Check the prompt to launch the web schema or use the cli """ + """Check the prompt to launch the web schema or use the cli""" assert self.launcher.prompt_web_gui() == False @mock.patch("nf_core.utils.poll_nfcore_web_api", side_effect=[{}]) def test_launch_web_gui_missing_keys(self, mock_poll_nfcore_web_api): - """ Check the code that opens the web browser """ + """Check the code that opens the web browser""" self.launcher.get_pipeline_schema() self.launcher.merge_nxf_flag_schema() try: @@ -129,14 +129,14 @@ def test_launch_web_gui_missing_keys(self, mock_poll_nfcore_web_api): @mock.patch("webbrowser.open") @mock.patch("nf_core.utils.wait_cli_function") def test_launch_web_gui(self, mock_poll_nfcore_web_api, mock_webbrowser, mock_wait_cli_function): - """ Check the code that opens the web browser """ + """Check the code that opens the web browser""" self.launcher.get_pipeline_schema() self.launcher.merge_nxf_flag_schema() assert self.launcher.launch_web_gui() == None @mock.patch("nf_core.utils.poll_nfcore_web_api", side_effect=[{"status": "error", "message": "foo"}]) def test_get_web_launch_response_error(self, mock_poll_nfcore_web_api): - """ Test polling the website for a launch response - status error """ + """Test polling the website for a launch response - status error""" try: self.launcher.get_web_launch_response() raise UserWarning("Should have hit an AssertionError") @@ -145,7 +145,7 @@ def test_get_web_launch_response_error(self, mock_poll_nfcore_web_api): @mock.patch("nf_core.utils.poll_nfcore_web_api", side_effect=[{"status": "foo"}]) def test_get_web_launch_response_unexpected(self, mock_poll_nfcore_web_api): - """ Test polling the website for a launch response - status error """ + """Test polling the website for a launch response - status error""" try: self.launcher.get_web_launch_response() raise UserWarning("Should have hit an AssertionError") @@ -154,12 +154,12 @@ def 
test_get_web_launch_response_unexpected(self, mock_poll_nfcore_web_api): @mock.patch("nf_core.utils.poll_nfcore_web_api", side_effect=[{"status": "waiting_for_user"}]) def test_get_web_launch_response_waiting(self, mock_poll_nfcore_web_api): - """ Test polling the website for a launch response - status waiting_for_user""" + """Test polling the website for a launch response - status waiting_for_user""" assert self.launcher.get_web_launch_response() == False @mock.patch("nf_core.utils.poll_nfcore_web_api", side_effect=[{"status": "launch_params_complete"}]) def test_get_web_launch_response_missing_keys(self, mock_poll_nfcore_web_api): - """ Test polling the website for a launch response - complete, but missing keys """ + """Test polling the website for a launch response - complete, but missing keys""" try: self.launcher.get_web_launch_response() raise UserWarning("Should have hit an AssertionError") @@ -183,12 +183,12 @@ def test_get_web_launch_response_missing_keys(self, mock_poll_nfcore_web_api): ) @mock.patch.object(nf_core.launch.Launch, "sanitise_web_response") def test_get_web_launch_response_valid(self, mock_poll_nfcore_web_api, mock_sanitise): - """ Test polling the website for a launch response - complete, valid response """ + """Test polling the website for a launch response - complete, valid response""" self.launcher.get_pipeline_schema() assert self.launcher.get_web_launch_response() == True def test_sanitise_web_response(self): - """ Check that we can properly sanitise results from the web """ + """Check that we can properly sanitise results from the web""" self.launcher.get_pipeline_schema() self.launcher.nxf_flags["-name"] = "" self.launcher.schema_obj.input_params["single_end"] = "true" @@ -199,7 +199,7 @@ def test_sanitise_web_response(self): assert self.launcher.schema_obj.input_params["max_cpus"] == 12 def test_ob_to_questionary_bool(self): - """ Check converting a python dict to a pyenquirer format - booleans """ + """Check converting a python dict to a pyenquirer format - booleans""" sc_obj = { "type": "boolean", "default": "True", @@ -219,7 +219,7 @@ def test_ob_to_questionary_bool(self): assert result["filter"](False) == False def test_ob_to_questionary_number(self): - """ Check converting a python dict to a pyenquirer format - with enum """ + """Check converting a python dict to a pyenquirer format - with enum""" sc_obj = {"type": "number", "default": 0.1} result = self.launcher.single_param_to_questionary("min_reps_consensus", sc_obj) assert result["type"] == "input" @@ -233,7 +233,7 @@ def test_ob_to_questionary_number(self): assert result["filter"]("") == "" def test_ob_to_questionary_integer(self): - """ Check converting a python dict to a pyenquirer format - with enum """ + """Check converting a python dict to a pyenquirer format - with enum""" sc_obj = {"type": "integer", "default": 1} result = self.launcher.single_param_to_questionary("broad_cutoff", sc_obj) assert result["type"] == "input" @@ -247,7 +247,7 @@ def test_ob_to_questionary_integer(self): assert result["filter"]("") == "" def test_ob_to_questionary_range(self): - """ Check converting a python dict to a pyenquirer format - with enum """ + """Check converting a python dict to a pyenquirer format - with enum""" sc_obj = {"type": "number", "minimum": "10", "maximum": "20", "default": 15} result = self.launcher.single_param_to_questionary("broad_cutoff", sc_obj) assert result["type"] == "input" @@ -261,7 +261,7 @@ def test_ob_to_questionary_range(self): assert result["filter"]("") == "" def 
test_ob_to_questionary_enum(self): - """ Check converting a python dict to a questionary format - with enum """ + """Check converting a python dict to a questionary format - with enum""" sc_obj = {"type": "string", "default": "copy", "enum": ["symlink", "rellink"]} result = self.launcher.single_param_to_questionary("publish_dir_mode", sc_obj) assert result["type"] == "list" @@ -269,7 +269,7 @@ def test_ob_to_questionary_enum(self): assert result["choices"] == ["symlink", "rellink"] def test_ob_to_questionary_pattern(self): - """ Check converting a python dict to a questionary format - with pattern """ + """Check converting a python dict to a questionary format - with pattern""" sc_obj = {"type": "string", "pattern": "^([a-zA-Z0-9_\\-\\.]+)@([a-zA-Z0-9_\\-\\.]+)\\.([a-zA-Z]{2,5})$"} result = self.launcher.single_param_to_questionary("email", sc_obj) assert result["type"] == "input" @@ -281,7 +281,7 @@ def test_ob_to_questionary_pattern(self): ) def test_strip_default_params(self): - """ Test stripping default parameters """ + """Test stripping default parameters""" self.launcher.get_pipeline_schema() self.launcher.set_schema_inputs() self.launcher.schema_obj.input_params.update({"input": "custom_input"}) @@ -290,14 +290,14 @@ def test_strip_default_params(self): assert self.launcher.schema_obj.input_params == {"input": "custom_input"} def test_build_command_empty(self): - """ Test the functionality to build a nextflow command - nothing customsied """ + """Test the functionality to build a nextflow command - nothing customised""" self.launcher.get_pipeline_schema() self.launcher.merge_nxf_flag_schema() self.launcher.build_command() assert self.launcher.nextflow_cmd == "nextflow run {}".format(self.template_dir) def test_build_command_nf(self): - """ Test the functionality to build a nextflow command - core nf customised """ + """Test the functionality to build a nextflow command - core nf customised""" self.launcher.get_pipeline_schema() self.launcher.merge_nxf_flag_schema() self.launcher.nxf_flags["-name"] = "Test_Workflow" @@ -306,7 +306,7 @@ def test_build_command_nf(self): assert self.launcher.nextflow_cmd == 'nextflow run {} -name "Test_Workflow" -resume'.format(self.template_dir) def test_build_command_params(self): - """ Test the functionality to build a nextflow command - params supplied """ + """Test the functionality to build a nextflow command - params supplied""" self.launcher.get_pipeline_schema() self.launcher.schema_obj.input_params.update({"input": "custom_input"}) self.launcher.build_command() @@ -320,7 +320,7 @@ def test_build_command_params(self): assert saved_json == {"input": "custom_input"} def test_build_command_params_cl(self): - """ Test the functionality to build a nextflow command - params on Nextflow command line """ + """Test the functionality to build a nextflow command - params on Nextflow command line""" self.launcher.use_params_file = False self.launcher.get_pipeline_schema() self.launcher.schema_obj.input_params.update({"input": "custom_input"}) diff --git a/tests/test_licenses.py b/tests/test_licenses.py index 385237229f..7af179bdc9 100644 --- a/tests/test_licenses.py +++ b/tests/test_licenses.py @@ -17,7 +17,7 @@ class WorkflowLicensesTest(unittest.TestCase): retrieval functionality of nf-core tools.""" def setUp(self): - """ Create a new pipeline, then make a Licence object """ + """Create a new pipeline, then make a Licence object""" # Set up the schema self.pipeline_dir = os.path.join(tempfile.mkdtemp(), "test_pipeline") self.create_obj =
nf_core.create.PipelineCreate("testing", "test pipeline", "tester", outdir=self.pipeline_dir) diff --git a/tests/test_list.py b/tests/test_list.py index 97be0771be..082c7ffffd 100644 --- a/tests/test_list.py +++ b/tests/test_list.py @@ -20,7 +20,7 @@ class TestLint(unittest.TestCase): @mock.patch("subprocess.check_output") def test_working_listcall(self, mock_subprocess): - """ Test that listing pipelines works """ + """Test that listing pipelines works""" wf_table = nf_core.list.list_workflows() console = Console(record=True) console.print(wf_table) @@ -30,7 +30,7 @@ def test_working_listcall(self, mock_subprocess): @mock.patch("subprocess.check_output") def test_working_listcall_archived(self, mock_subprocess): - """ Test that listing pipelines works, showing archived pipelines """ + """Test that listing pipelines works, showing archived pipelines""" wf_table = nf_core.list.list_workflows(show_archived=True) console = Console(record=True) console.print(wf_table) @@ -39,7 +39,7 @@ def test_working_listcall_archived(self, mock_subprocess): @mock.patch("subprocess.check_output") def test_working_listcall_json(self, mock_subprocess): - """ Test that listing pipelines with JSON works """ + """Test that listing pipelines with JSON works""" wf_json_str = nf_core.list.list_workflows(as_json=True) wf_json = json.loads(wf_json_str) for wf in wf_json["remote_workflows"]: @@ -49,7 +49,7 @@ def test_working_listcall_json(self, mock_subprocess): raise AssertionError("Could not find ampliseq in JSON") def test_pretty_datetime(self): - """ Test that the pretty datetime function works """ + """Test that the pretty datetime function works""" now = datetime.now() nf_core.list.pretty_date(now) now_ts = time.mktime(now.timetuple()) diff --git a/tests/test_modules.py b/tests/test_modules.py index eb70ad759d..cb4c25f4ac 100644 --- a/tests/test_modules.py +++ b/tests/test_modules.py @@ -14,7 +14,7 @@ def create_modules_repo_dummy(): - """ Create a dummy copy of the nf-core/modules repo """ + """Create a dummy copy of the nf-core/modules repo""" root_dir = tempfile.mkdtemp() os.mkdir(os.path.join(root_dir, "software")) os.makedirs(os.path.join(root_dir, "tests", "software")) @@ -32,7 +32,7 @@ class TestModules(unittest.TestCase): """Class for modules tests""" def setUp(self): - """ Create a new PipelineSchema and Launch objects """ + """Create a new PipelineSchema and Launch objects""" # Set up the schema root_repo_dir = os.path.dirname(os.path.dirname(os.path.realpath(__file__))) self.template_dir = os.path.join(root_repo_dir, "nf_core", "pipeline-template") @@ -45,13 +45,13 @@ def setUp(self): self.nfcore_modules = create_modules_repo_dummy() def test_modulesrepo_class(self): - """ Initialise a modules repo object """ + """Initialise a modules repo object""" modrepo = nf_core.modules.ModulesRepo() assert modrepo.name == "nf-core/modules" assert modrepo.branch == "master" def test_modules_list(self): - """ Test listing available modules """ + """Test listing available modules""" self.mods.pipeline_dir = None listed_mods = self.mods.list_modules() console = Console(record=True) @@ -60,45 +60,45 @@ def test_modules_list(self): assert "fastqc" in output def test_modules_install_nopipeline(self): - """ Test installing a module - no pipeline given """ + """Test installing a module - no pipeline given""" self.mods.pipeline_dir = None assert self.mods.install("foo") is False def test_modules_install_emptypipeline(self): - """ Test installing a module - empty dir given """ + """Test installing a module - empty dir 
given""" self.mods.pipeline_dir = tempfile.mkdtemp() with pytest.raises(UserWarning) as excinfo: self.mods.install("foo") assert "Could not find a 'main.nf' or 'nextflow.config' file" in str(excinfo.value) def test_modules_install_nomodule(self): - """ Test installing a module - unrecognised module given """ + """Test installing a module - unrecognised module given""" assert self.mods.install("foo") is False def test_modules_install_fastqc(self): - """ Test installing a module - FastQC """ + """Test installing a module - FastQC""" assert self.mods.install("fastqc") is not False module_path = os.path.join(self.mods.pipeline_dir, "modules", "nf-core", "software", "fastqc") assert os.path.exists(module_path) def test_modules_install_fastqc_twice(self): - """ Test installing a module - FastQC already there """ + """Test installing a module - FastQC already there""" self.mods.install("fastqc") assert self.mods.install("fastqc") is False def test_modules_remove_fastqc(self): - """ Test removing FastQC module after installing it""" + """Test removing FastQC module after installing it""" self.mods.install("fastqc") module_path = os.path.join(self.mods.pipeline_dir, "modules", "nf-core", "software", "fastqc") assert self.mods.remove("fastqc") assert os.path.exists(module_path) is False def test_modules_remove_fastqc_uninstalled(self): - """ Test removing FastQC module without installing it """ + """Test removing FastQC module without installing it""" assert self.mods.remove("fastqc") is False def test_modules_lint_fastqc(self): - """ Test linting the fastqc module """ + """Test linting the fastqc module""" self.mods.install("fastqc") module_lint = nf_core.modules.ModuleLint(dir=self.pipeline_dir) module_lint.lint(print_results=False, all_modules=True) @@ -107,7 +107,7 @@ def test_modules_lint_fastqc(self): assert len(module_lint.failed) == 0 def test_modules_lint_empty(self): - """ Test linting a pipeline with no modules installed """ + """Test linting a pipeline with no modules installed""" module_lint = nf_core.modules.ModuleLint(dir=self.pipeline_dir) module_lint.lint(print_results=False, all_modules=True) assert len(module_lint.passed) == 0 @@ -115,7 +115,7 @@ def test_modules_lint_empty(self): assert len(module_lint.failed) == 0 def test_modules_lint_new_modules(self): - """ lint all modules in nf-core/modules repo clone """ + """lint all modules in nf-core/modules repo clone""" module_lint = nf_core.modules.ModuleLint(dir=self.nfcore_modules) module_lint.lint(print_results=True, all_modules=True) assert len(module_lint.passed) == 20 @@ -123,13 +123,13 @@ def test_modules_lint_new_modules(self): assert len(module_lint.failed) == 0 def test_modules_create_succeed(self): - """ Succeed at creating the FastQC module """ + """Succeed at creating the FastQC module""" module_create = nf_core.modules.ModuleCreate(self.pipeline_dir, "fastqc", "@author", "process_low", True, True) module_create.create() assert os.path.exists(os.path.join(self.pipeline_dir, "modules", "local", "fastqc.nf")) def test_modules_create_fail_exists(self): - """ Fail at creating the same module twice""" + """Fail at creating the same module twice""" module_create = nf_core.modules.ModuleCreate( self.pipeline_dir, "fastqc", "@author", "process_low", False, False ) @@ -139,7 +139,7 @@ def test_modules_create_fail_exists(self): assert "Module file exists already" in str(excinfo.value) def test_modules_custom_yml_dumper(self): - """ Try to create a yml file with the custom yml dumper """ + """Try to create a yml file with the 
custom yml dumper""" out_dir = tempfile.mkdtemp() yml_output_path = os.path.join(out_dir, "test.yml") meta_builder = nf_core.modules.ModulesTestYmlBuilder("test/tool", False, "./", False, True) @@ -149,7 +149,7 @@ def test_modules_custom_yml_dumper(self): assert os.path.isfile(yml_output_path) def test_modules_test_file_dict(self): - """ Creat dict of test files and create md5 sums """ + """Create dict of test files and create md5 sums""" test_file_dir = tempfile.mkdtemp() meta_builder = nf_core.modules.ModulesTestYmlBuilder("test/tool", False, "./", False, True) with open(os.path.join(test_file_dir, "test_file.txt"), "w") as fh: @@ -159,7 +159,7 @@ def test_modules_test_file_dict(self): assert test_files[0]["md5sum"] == "2191e06b28b5ba82378bcc0672d01786" def test_modules_create_test_yml_get_md5(self): - """ Get md5 sums from a dummy output """ + """Get md5 sums from a dummy output""" test_file_dir = tempfile.mkdtemp() meta_builder = nf_core.modules.ModulesTestYmlBuilder("test/tool", False, "./", False, True) with open(os.path.join(test_file_dir, "test_file.txt"), "w") as fh: @@ -170,7 +170,7 @@ def test_modules_create_test_yml_get_md5(self): assert test_files[0]["md5sum"] == "2191e06b28b5ba82378bcc0672d01786" def test_modules_create_test_yml_entry_points(self): - """ Test extracting test entry points from a main.nf file""" + """Test extracting test entry points from a main.nf file""" meta_builder = nf_core.modules.ModulesTestYmlBuilder("star/align", False, "./", False, True) meta_builder.module_test_main = os.path.join( self.nfcore_modules, "tests", "software", "star", "align", "main.nf" ) @@ -179,7 +179,7 @@ def test_modules_create_test_yml_entry_points(self): assert meta_builder.entry_points[0] == "test_star_align" def test_modules_create_test_yml_check_inputs(self): - """ Test the check_inputs() function - raise UserWarning because test.yml exists """ + """Test the check_inputs() function - raise UserWarning because test.yml exists""" cwd = os.getcwd() os.chdir(self.nfcore_modules) meta_builder = nf_core.modules.ModulesTestYmlBuilder("star/align", False, "./", False, True) @@ -192,7 +192,7 @@ def test_modules_create_test_yml_check_inputs(self): assert "Test YAML file already exists!"
in str(excinfo.value) def test_modules_create_nfcore_modules(self): - """ Create a module in nf-core/modules clone """ + """Create a module in nf-core/modules clone""" module_create = nf_core.modules.ModuleCreate( self.nfcore_modules, "fastqc", "@author", "process_low", False, False ) @@ -201,7 +201,7 @@ def test_modules_create_nfcore_modules(self): assert os.path.exists(os.path.join(self.nfcore_modules, "tests", "software", "fastqc", "main.nf")) def test_modules_create_nfcore_modules_subtool(self): - """ Create a tool/subtool module in a nf-core/modules clone """ + """Create a tool/subtool module in a nf-core/modules clone""" module_create = nf_core.modules.ModuleCreate( self.nfcore_modules, "star/index", "@author", "process_medium", False, False ) diff --git a/tests/test_schema.py b/tests/test_schema.py index 2f29a1f0bd..7d37636a0d 100644 --- a/tests/test_schema.py +++ b/tests/test_schema.py @@ -20,7 +20,7 @@ class TestSchema(unittest.TestCase): """Class for schema tests""" def setUp(self): - """ Create a new PipelineSchema object """ + """Create a new PipelineSchema object""" self.schema_obj = nf_core.schema.PipelineSchema() self.root_repo_dir = os.path.dirname(os.path.dirname(os.path.realpath(__file__))) # Copy the template to a temp directory so that we can use that for tests @@ -30,19 +30,19 @@ def setUp(self): self.template_schema = os.path.join(self.template_dir, "nextflow_schema.json") def test_load_lint_schema(self): - """ Check linting with the pipeline template directory """ + """Check linting with the pipeline template directory""" self.schema_obj.get_schema_path(self.template_dir) self.schema_obj.load_lint_schema() @pytest.mark.xfail(raises=AssertionError, strict=True) def test_load_lint_schema_nofile(self): - """ Check that linting raises properly if a non-existant file is given """ + """Check that linting raises properly if a non-existent file is given""" self.schema_obj.get_schema_path("fake_file") self.schema_obj.load_lint_schema() @pytest.mark.xfail(raises=AssertionError, strict=True) def test_load_lint_schema_notjson(self): - """ Check that linting raises properly if a non-JSON file is given """ + """Check that linting raises properly if a non-JSON file is given""" self.schema_obj.get_schema_path(os.path.join(self.template_dir, "nextflow.config")) self.schema_obj.load_lint_schema() @@ -59,20 +59,20 @@ def test_load_lint_schema_noparams(self): self.schema_obj.load_lint_schema() def test_get_schema_path_dir(self): - """ Get schema file from directory """ + """Get schema file from directory""" self.schema_obj.get_schema_path(self.template_dir) def test_get_schema_path_path(self): - """ Get schema file from a path """ + """Get schema file from a path""" self.schema_obj.get_schema_path(self.template_schema) @pytest.mark.xfail(raises=AssertionError, strict=True) def test_get_schema_path_path_notexist(self): - """ Get schema file from a path """ + """Get schema file from a path""" self.schema_obj.get_schema_path("fubar", local_only=True) def test_get_schema_path_name(self): - """ Get schema file from the name of a remote pipeline """ + """Get schema file from the name of a remote pipeline""" self.schema_obj.get_schema_path("atacseq") @pytest.mark.xfail(raises=AssertionError, strict=True) @@ -84,12 +84,12 @@ def test_get_schema_path_name_notexist(self): self.schema_obj.get_schema_path("exoseq") def test_load_schema(self): - """ Try to load a schema from a file """ + """Try to load a schema from a file""" self.schema_obj.schema_filename = self.template_schema
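# (load_schema() reads schema_filename from disk with json.load into
# self.schema - see the nf_core/schema.py hunk earlier in this patch)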
self.schema_obj.load_schema() def test_save_schema(self): - """ Try to save a schema """ + """Try to save a schema""" # Load the template schema self.schema_obj.schema_filename = self.template_schema self.schema_obj.load_schema() @@ -100,7 +100,7 @@ def test_save_schema(self): self.schema_obj.save_schema() def test_load_input_params_json(self): - """ Try to load a JSON file with params for a pipeline run """ + """Try to load a JSON file with params for a pipeline run""" # Make a temporary file to write schema to tmp_file = tempfile.NamedTemporaryFile() with open(tmp_file.name, "w") as fh: @@ -108,7 +108,7 @@ def test_load_input_params_json(self): self.schema_obj.load_input_params(tmp_file.name) def test_load_input_params_yaml(self): - """ Try to load a YAML file with params for a pipeline run """ + """Try to load a YAML file with params for a pipeline run""" # Make a temporary file to write schema to tmp_file = tempfile.NamedTemporaryFile() with open(tmp_file.name, "w") as fh: @@ -117,11 +117,11 @@ def test_load_input_params_yaml(self): @pytest.mark.xfail(raises=AssertionError, strict=True) def test_load_input_params_invalid(self): - """ Check failure when a non-existent file params file is loaded """ + """Check failure when a non-existent file params file is loaded""" self.schema_obj.load_input_params("fubar") def test_validate_params_pass(self): - """ Try validating a set of parameters against a schema """ + """Try validating a set of parameters against a schema""" # Load the template schema self.schema_obj.schema_filename = self.template_schema self.schema_obj.load_schema() @@ -129,7 +129,7 @@ def test_validate_params_pass(self): assert self.schema_obj.validate_params() def test_validate_params_fail(self): - """ Check that False is returned if params don't validate against a schema """ + """Check that False is returned if params don't validate against a schema""" # Load the template schema self.schema_obj.schema_filename = self.template_schema self.schema_obj.load_schema() @@ -137,7 +137,7 @@ def test_validate_params_fail(self): assert not self.schema_obj.validate_params() def test_validate_schema_pass(self): - """ Check that the schema validation passes """ + """Check that the schema validation passes""" # Load the template schema self.schema_obj.schema_filename = self.template_schema self.schema_obj.load_schema() @@ -145,7 +145,7 @@ def test_validate_schema_pass(self): @pytest.mark.xfail(raises=AssertionError, strict=True) def test_validate_schema_fail_noparams(self): - """ Check that the schema validation fails when no params described """ + """Check that the schema validation fails when no params described""" self.schema_obj.schema = {"type": "invalidthing"} self.schema_obj.validate_schema(self.schema_obj.schema) @@ -196,7 +196,7 @@ def test_validate_schema_fail_unexpected_allof(self): assert e.args[0] == "Subschema `groupThree` found in `allOf` but not `definitions`" def test_make_skeleton_schema(self): - """ Test making a new schema skeleton """ + """Test making a new schema skeleton""" self.schema_obj.schema_filename = self.template_schema self.schema_obj.pipeline_manifest["name"] = "nf-core/test" self.schema_obj.pipeline_manifest["description"] = "Test pipeline" @@ -204,24 +204,24 @@ def test_make_skeleton_schema(self): self.schema_obj.validate_schema(self.schema_obj.schema) def test_get_wf_params(self): - """ Test getting the workflow parameters from a pipeline """ + """Test getting the workflow parameters from a pipeline""" self.schema_obj.schema_filename = 
self.template_schema self.schema_obj.get_wf_params() def test_prompt_remove_schema_notfound_config_returntrue(self): - """ Remove unrecognised params from the schema """ + """Remove unrecognised params from the schema""" self.schema_obj.pipeline_params = {"foo": "bar"} self.schema_obj.no_prompts = True assert self.schema_obj.prompt_remove_schema_notfound_config("baz") def test_prompt_remove_schema_notfound_config_returnfalse(self): - """ Do not remove unrecognised params from the schema """ + """Do not remove unrecognised params from the schema""" self.schema_obj.pipeline_params = {"foo": "bar"} self.schema_obj.no_prompts = True assert not self.schema_obj.prompt_remove_schema_notfound_config("foo") def test_remove_schema_notfound_configs(self): - """ Remove unrecognised params from the schema """ + """Remove unrecognised params from the schema""" self.schema_obj.schema = { "properties": {"foo": {"type": "string"}, "bar": {"type": "string"}}, "required": ["foo"], @@ -256,7 +256,7 @@ def test_remove_schema_notfound_configs_childschema(self): assert "foo" in params_removed def test_add_schema_found_configs(self): - """ Try adding a new parameter to the schema from the config """ + """Try adding a new parameter to the schema from the config""" self.schema_obj.pipeline_params = {"foo": "bar"} self.schema_obj.schema = {"properties": {}} self.schema_obj.no_prompts = True @@ -266,23 +266,23 @@ def test_add_schema_found_configs(self): assert "foo" in params_added def test_build_schema_param_str(self): - """ Build a new schema param from a config value (string) """ + """Build a new schema param from a config value (string)""" param = self.schema_obj.build_schema_param("foo") assert param == {"type": "string", "default": "foo"} def test_build_schema_param_bool(self): - """ Build a new schema param from a config value (bool) """ + """Build a new schema param from a config value (bool)""" param = self.schema_obj.build_schema_param("True") print(param) assert param == {"type": "boolean", "default": True} def test_build_schema_param_int(self): - """ Build a new schema param from a config value (int) """ + """Build a new schema param from a config value (int)""" param = self.schema_obj.build_schema_param("12") assert param == {"type": "integer", "default": 12} def test_build_schema_param_int(self): - """ Build a new schema param from a config value (float) """ + """Build a new schema param from a config value (float)""" param = self.schema_obj.build_schema_param("12.34") assert param == {"type": "number", "default": 12.34} @@ -309,7 +309,7 @@ def test_build_schema_from_scratch(self): @pytest.mark.xfail(raises=AssertionError, strict=True) @mock.patch("requests.post") def test_launch_web_builder_timeout(self, mock_post): - """ Mock launching the web builder, but timeout on the request """ + """Mock launching the web builder, but timeout on the request""" # Define the behaviour of the request get mock mock_post.side_effect = requests.exceptions.Timeout() self.schema_obj.launch_web_builder() @@ -317,7 +317,7 @@ def test_launch_web_builder_timeout(self, mock_post): @pytest.mark.xfail(raises=AssertionError, strict=True) @mock.patch("requests.post") def test_launch_web_builder_connection_error(self, mock_post): - """ Mock launching the web builder, but get a connection error """ + """Mock launching the web builder, but get a connection error""" # Define the behaviour of the request get mock mock_post.side_effect = requests.exceptions.ConnectionError() self.schema_obj.launch_web_builder() @@ -325,7 +325,7 @@ def 
test_launch_web_builder_connection_error(self, mock_post): @pytest.mark.xfail(raises=AssertionError, strict=True) @mock.patch("requests.post") def test_get_web_builder_response_timeout(self, mock_post): - """ Mock checking for a web builder response, but timeout on the request """ + """Mock checking for a web builder response, but timeout on the request""" # Define the behaviour of the request get mock mock_post.side_effect = requests.exceptions.Timeout() self.schema_obj.launch_web_builder() @@ -333,13 +333,13 @@ def test_get_web_builder_response_timeout(self, mock_post): @pytest.mark.xfail(raises=AssertionError, strict=True) @mock.patch("requests.post") def test_get_web_builder_response_connection_error(self, mock_post): - """ Mock checking for a web builder response, but get a connection error """ + """Mock checking for a web builder response, but get a connection error""" # Define the behaviour of the request get mock mock_post.side_effect = requests.exceptions.ConnectionError() self.schema_obj.launch_web_builder() def mocked_requests_post(**kwargs): - """ Helper function to emulate POST requests responses from the web """ + """Helper function to emulate POST requests responses from the web""" class MockResponse: def __init__(self, data, status_code): @@ -359,7 +359,7 @@ def __init__(self, data, status_code): @mock.patch("requests.post", side_effect=mocked_requests_post) def test_launch_web_builder_404(self, mock_post): - """ Mock launching the web builder """ + """Mock launching the web builder""" self.schema_obj.web_schema_build_url = "invalid_url" try: self.schema_obj.launch_web_builder() @@ -369,7 +369,7 @@ def test_launch_web_builder_404(self, mock_post): @mock.patch("requests.post", side_effect=mocked_requests_post) def test_launch_web_builder_invalid_status(self, mock_post): - """ Mock launching the web builder """ + """Mock launching the web builder""" self.schema_obj.web_schema_build_url = "valid_url_error" try: self.schema_obj.launch_web_builder() @@ -380,7 +380,7 @@ def test_launch_web_builder_invalid_status(self, mock_post): @mock.patch("requests.get") @mock.patch("webbrowser.open") def test_launch_web_builder_success(self, mock_post, mock_get, mock_webbrowser): - """ Mock launching the web builder """ + """Mock launching the web builder""" self.schema_obj.web_schema_build_url = "valid_url_success" try: self.schema_obj.launch_web_builder() @@ -390,7 +390,7 @@ def test_launch_web_builder_success(self, mock_post, mock_get, mock_webbrowser): assert e.args[0].startswith("Could not access remote API results: https://nf-co.re") def mocked_requests_get(*args, **kwargs): - """ Helper function to emulate GET requests responses from the web """ + """Helper function to emulate GET requests responses from the web""" class MockResponse: def __init__(self, data, status_code): @@ -414,7 +414,7 @@ def __init__(self, data, status_code): @mock.patch("requests.get", side_effect=mocked_requests_get) def test_get_web_builder_response_404(self, mock_post): - """ Mock launching the web builder """ + """Mock launching the web builder""" self.schema_obj.web_schema_build_api_url = "invalid_url" try: self.schema_obj.get_web_builder_response() @@ -424,7 +424,7 @@ def test_get_web_builder_response_404(self, mock_post): @mock.patch("requests.get", side_effect=mocked_requests_get) def test_get_web_builder_response_error(self, mock_post): - """ Mock launching the web builder """ + """Mock launching the web builder""" self.schema_obj.web_schema_build_api_url = "valid_url_error" try: 
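# (the mocked_requests_get helper above keys its MockResponse off the URL,
# so "valid_url_error" is expected to emulate an error payload and raise here)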
self.schema_obj.get_web_builder_response() @@ -434,13 +434,13 @@ def test_get_web_builder_response_error(self, mock_post): @mock.patch("requests.get", side_effect=mocked_requests_get) def test_get_web_builder_response_waiting(self, mock_post): - """ Mock launching the web builder """ + """Mock launching the web builder""" self.schema_obj.web_schema_build_api_url = "valid_url_waiting" assert self.schema_obj.get_web_builder_response() is False @mock.patch("requests.get", side_effect=mocked_requests_get) def test_get_web_builder_response_saved(self, mock_post): - """ Mock launching the web builder """ + """Mock launching the web builder""" self.schema_obj.web_schema_build_api_url = "valid_url_saved" try: self.schema_obj.get_web_builder_response() diff --git a/tests/test_sync.py b/tests/test_sync.py index 3900a95d5d..ce7d07dc7f 100644 --- a/tests/test_sync.py +++ b/tests/test_sync.py @@ -20,13 +20,13 @@ def setUp(self): self.make_new_pipeline() def make_new_pipeline(self): - """ Create a new pipeline to test """ + """Create a new pipeline to test""" self.pipeline_dir = os.path.join(tempfile.mkdtemp(), "test_pipeline") self.create_obj = nf_core.create.PipelineCreate("testing", "test pipeline", "tester", outdir=self.pipeline_dir) self.create_obj.init_pipeline() def test_inspect_sync_dir_notgit(self): - """ Try syncing an empty directory """ + """Try syncing an empty directory""" psync = nf_core.sync.PipelineSync(tempfile.mkdtemp()) try: psync.inspect_sync_dir() @@ -35,7 +35,7 @@ def test_inspect_sync_dir_notgit(self): assert "does not appear to be a git repository" in e.args[0] def test_inspect_sync_dir_dirty(self): - """ Try syncing a pipeline with uncommitted changes """ + """Try syncing a pipeline with uncommitted changes""" # Add an empty file, uncommitted test_fn = os.path.join(self.pipeline_dir, "uncommitted") open(test_fn, "a").close() @@ -52,7 +52,7 @@ def test_inspect_sync_dir_dirty(self): raise e def test_get_wf_config_no_branch(self): - """ Try getting a workflow config when the branch doesn't exist """ + """Try getting a workflow config when the branch doesn't exist""" # Try to sync, check we halt with the right error psync = nf_core.sync.PipelineSync(self.pipeline_dir, from_branch="foo") try: @@ -63,7 +63,7 @@ def test_get_wf_config_no_branch(self): assert e.args[0] == "Branch `foo` not found!" def test_get_wf_config_missing_required_config(self): - """ Try getting a workflow config, then make it miss a required config option """ + """Try getting a workflow config, then make it miss a required config option""" # Try to sync, check we halt with the right error psync = nf_core.sync.PipelineSync(self.pipeline_dir) psync.required_config_vars = ["fakethisdoesnotexist"] @@ -78,14 +78,14 @@ def test_get_wf_config_missing_required_config(self): assert e.args[0] == "Workflow config variable `fakethisdoesnotexist` not found!" 
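# A minimal alternative sketch (assuming the same PipelineSync call sequence
# shown above): pytest.raises both catches the expected exception and fails
# the test if nothing is raised, which a bare try/except like the one above
# does not do on its own:
#
#     import pytest
#
#     def test_get_wf_config_no_branch(self):
#         psync = nf_core.sync.PipelineSync(self.pipeline_dir, from_branch="foo")
#         with pytest.raises(Exception, match="Branch `foo` not found!"):
#             psync.inspect_sync_dir()
#             psync.get_wf_config()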
def test_checkout_template_branch(self): - """ Try checking out the TEMPLATE branch of the pipeline """ + """Try checking out the TEMPLATE branch of the pipeline""" psync = nf_core.sync.PipelineSync(self.pipeline_dir) psync.inspect_sync_dir() psync.get_wf_config() psync.checkout_template_branch() def test_delete_template_branch_files(self): - """ Confirm that we can delete all files in the TEMPLATE branch """ + """Confirm that we can delete all files in the TEMPLATE branch""" psync = nf_core.sync.PipelineSync(self.pipeline_dir) psync.inspect_sync_dir() psync.get_wf_config() @@ -94,7 +94,7 @@ def test_delete_template_branch_files(self): assert os.listdir(self.pipeline_dir) == [".git"] def test_create_template_pipeline(self): - """ Confirm that we can delete all files in the TEMPLATE branch """ + """Confirm that we can delete all files in the TEMPLATE branch""" # First, delete all the files psync = nf_core.sync.PipelineSync(self.pipeline_dir) psync.inspect_sync_dir() @@ -108,7 +108,7 @@ def test_create_template_pipeline(self): assert "nextflow.config" in os.listdir(self.pipeline_dir) def test_commit_template_changes_nochanges(self): - """ Try to commit the TEMPLATE branch, but no changes were made """ + """Try to commit the TEMPLATE branch, but no changes were made""" # Check out the TEMPLATE branch but skip making the new template etc. psync = nf_core.sync.PipelineSync(self.pipeline_dir) psync.inspect_sync_dir() @@ -118,7 +118,7 @@ def test_commit_template_changes_nochanges(self): assert psync.commit_template_changes() is False def test_commit_template_changes_changes(self): - """ Try to commit the TEMPLATE branch, but no changes were made """ + """Try to commit the TEMPLATE branch, but no changes were made""" # Check out the TEMPLATE branch but skip making the new template etc. psync = nf_core.sync.PipelineSync(self.pipeline_dir) psync.inspect_sync_dir() @@ -135,11 +135,11 @@ def test_commit_template_changes_changes(self): assert psync.repo.is_dirty(untracked_files=True) is False def raise_git_exception(self): - """ Raise an exception from GitPython""" + """Raise an exception from GitPython""" raise git.exc.GitCommandError("Test") def test_push_template_branch_error(self): - """ Try pushing the changes, but without a remote (should fail) """ + """Try pushing the changes, but without a remote (should fail)""" # Check out the TEMPLATE branch but skip making the new template etc. 
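# (no git remote is configured for this freshly created test repo, so the
# push is expected to fail; the assertion below checks the error message)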
psync = nf_core.sync.PipelineSync(self.pipeline_dir) psync.inspect_sync_dir() @@ -157,7 +157,7 @@ def test_push_template_branch_error(self): assert e.args[0].startswith("Could not push TEMPLATE branch") def mocked_requests_get(**kwargs): - """ Helper function to emulate POST requests responses from the web """ + """Helper function to emulate POST requests responses from the web""" class MockResponse: def __init__(self, data, status_code): @@ -172,7 +172,7 @@ def __init__(self, data, status_code): return MockResponse({"get_url": kwargs["url"]}, 404) def mocked_requests_patch(**kwargs): - """ Helper function to emulate POST requests responses from the web """ + """Helper function to emulate POST requests responses from the web""" class MockResponse: def __init__(self, data, status_code): @@ -186,7 +186,7 @@ def __init__(self, data, status_code): return MockResponse({"patch_url": kwargs["url"]}, 404) def mocked_requests_post(**kwargs): - """ Helper function to emulate POST requests responses from the web """ + """Helper function to emulate POST requests responses from the web""" class MockResponse: def __init__(self, data, status_code): @@ -203,7 +203,7 @@ def __init__(self, data, status_code): @mock.patch("requests.get", side_effect=mocked_requests_get) @mock.patch("requests.post", side_effect=mocked_requests_post) def test_make_pull_request_success(self, mock_get, mock_post): - """ Try making a PR - successful response """ + """Try making a PR - successful response""" psync = nf_core.sync.PipelineSync(self.pipeline_dir) psync.gh_username = "no_existing_pr" psync.gh_repo = "no_existing_pr/response" @@ -214,7 +214,7 @@ def test_make_pull_request_success(self, mock_get, mock_post): @mock.patch("requests.get", side_effect=mocked_requests_get) @mock.patch("requests.post", side_effect=mocked_requests_post) def test_make_pull_request_bad_response(self, mock_get, mock_post): - """ Try making a PR and getting a 404 error """ + """Try making a PR and getting a 404 error""" psync = nf_core.sync.PipelineSync(self.pipeline_dir) psync.gh_username = "bad_url" psync.gh_repo = "bad_url/response" diff --git a/tests/test_utils.py b/tests/test_utils.py index 542a28ee28..c6947861c7 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -129,6 +129,6 @@ def test_pip_package_connection_error(self, mock_get): nf_core.utils.pip_package("multiqc=1.10") def test_pip_erroneous_package(self): - """ Tests the PyPi API package information query """ + """Tests the PyPi API package information query""" with pytest.raises(ValueError): nf_core.utils.pip_package("not_a_package=1.0") From f1999d467ae3bdc3c16e04b4007c15fdf8b71cfc Mon Sep 17 00:00:00 2001 From: Phil Ewels Date: Sat, 1 May 2021 00:15:09 +0200 Subject: [PATCH 159/210] Check if terminal is interactive before prompting for setting --- nf_core/download.py | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/nf_core/download.py b/nf_core/download.py index 6e6d14ab0a..2664f0cf44 100644 --- a/nf_core/download.py +++ b/nf_core/download.py @@ -98,6 +98,8 @@ def __init__( self.parallel_downloads = parallel_downloads self.wf_name = None + self.wf_releases = {} + self.wf_branches = {} self.wf_sha = None self.wf_download_url = None self.nf_config = dict() @@ -106,8 +108,6 @@ def __init__( # Fetch remote workflows self.wfs = nf_core.list.Workflows() self.wfs.get_remote_workflows() - self.wf_releases = {} - self.wf_branches = {} def download_workflow(self): """Starts a nf-core workflow download.""" @@ -283,7 +283,11 @@ def 
prompt_container_download(self): def prompt_use_singularity_cachedir(self): """Prompt about using $NXF_SINGULARITY_CACHEDIR if not already set""" - if self.container == "singularity" and os.environ.get("NXF_SINGULARITY_CACHEDIR") is None: + if ( + self.container == "singularity" + and os.environ.get("NXF_SINGULARITY_CACHEDIR") is None + and stderr.is_interactive # Use rich auto-detection of interactive shells + ): stderr.print( "\nNextflow and nf-core can use an environment variable called [blue]$NXF_SINGULARITY_CACHEDIR[/] that is a path to a directory where remote Singularity images are stored. " "This allows downloaded images to be cached in a central location." From 683242d809a2dee105931974ef68d1c91656fef2 Mon Sep 17 00:00:00 2001 From: Phil Ewels Date: Sat, 1 May 2021 00:23:21 +0200 Subject: [PATCH 160/210] Screen container names for dynamic {squiggly_brackets} --- nf_core/download.py | 18 ++++++++++++++---- 1 file changed, 14 insertions(+), 4 deletions(-) diff --git a/nf_core/download.py b/nf_core/download.py index 2664f0cf44..621f8dabdf 100644 --- a/nf_core/download.py +++ b/nf_core/download.py @@ -515,6 +515,7 @@ def find_container_images(self): """ log.debug("Fetching container names for workflow") + containers_raw = [] # Use linting code to parse the pipeline nextflow config self.nf_config = nf_core.utils.fetch_wf_config(os.path.join(self.outdir, "workflow")) @@ -522,7 +523,7 @@ def find_container_images(self): # Find any config variables that look like a container for k, v in self.nf_config.items(): if k.startswith("process.") and k.endswith(".container"): - self.containers.append(v.strip('"').strip("'")) + containers_raw.append(v.strip('"').strip("'")) # Recursive search through any DSL2 module files for container spec lines. for subdir, dirs, files in os.walk(os.path.join(self.outdir, "workflow", "modules")): @@ -539,15 +540,24 @@ def find_container_images(self): # If we have matches, save the first one that starts with http for m in matches: if m.startswith("http"): - self.containers.append(m.strip('"').strip("'")) + containers_raw.append(m.strip('"').strip("'")) break # If we get here then we didn't call break - just save the first match else: if len(matches) > 0: - self.containers.append(matches[0].strip('"').strip("'")) + containers_raw.append(matches[0].strip('"').strip("'")) # Remove duplicates and sort - self.containers = sorted(list(set(self.containers))) + containers_raw = sorted(list(set(containers_raw))) + + # Strip any container names that have dynamic names - eg. {params.foo} + self.containers = [] + for container in containers_raw: + if "{" in container and "}" in container: + log.error(f"Container name '{container}' has dynamic Nextflow logic in name - skipping") + log.info("Please use a 'nextflow run' command to fetch this container. 
Ask on Slack if you need help.") + else: + self.containers.append(container) log.info("Found {} container{}".format(len(self.containers), "s" if len(self.containers) > 1 else "")) From 7b745b3652d34b2c5e7538ff28fab178bc0b3a23 Mon Sep 17 00:00:00 2001 From: Phil Ewels Date: Sat, 1 May 2021 00:30:57 +0200 Subject: [PATCH 161/210] Push up minimum version of rich, louder colours --- nf_core/download.py | 4 +++- requirements.txt | 4 ++-- 2 files changed, 5 insertions(+), 3 deletions(-) diff --git a/nf_core/download.py b/nf_core/download.py index 621f8dabdf..b62acfa980 100644 --- a/nf_core/download.py +++ b/nf_core/download.py @@ -554,7 +554,9 @@ def find_container_images(self): self.containers = [] for container in containers_raw: if "{" in container and "}" in container: - log.error(f"Container name '{container}' has dynamic Nextflow logic in name - skipping") + log.error( + f"[red]Container name [green]'{container}'[/] has dynamic Nextflow logic in name - skipping![/]" + ) log.info("Please use a 'nextflow run' command to fetch this container. Ask on Slack if you need help.") else: self.containers.append(container) diff --git a/requirements.txt b/requirements.txt index a8afe909ce..3905fe6de7 100644 --- a/requirements.txt +++ b/requirements.txt @@ -9,5 +9,5 @@ pytest-workflow questionary>=1.8.0 requests_cache requests -rich>=9.8.2 -tabulate \ No newline at end of file +rich>=9.11.0 +tabulate From b66961c1528411f04804454e1304127621cc9f7b Mon Sep 17 00:00:00 2001 From: Phil Ewels Date: Sat, 1 May 2021 00:34:28 +0200 Subject: [PATCH 162/210] Bump rich minimum version again, as v1.10.0 fixed table style issue --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index 3905fe6de7..33da40c47e 100644 --- a/requirements.txt +++ b/requirements.txt @@ -9,5 +9,5 @@ pytest-workflow questionary>=1.8.0 requests_cache requests -rich>=9.11.0 +rich>=10.0.0 tabulate From a0bc438dfdf8b0b895e252df46094366fb0d7508 Mon Sep 17 00:00:00 2001 From: Phil Ewels Date: Sat, 1 May 2021 01:07:06 +0200 Subject: [PATCH 163/210] Fix pytests for download code --- nf_core/download.py | 18 +++++--- tests/test_download.py | 94 ++++++++++++++---------------------------- 2 files changed, 44 insertions(+), 68 deletions(-) diff --git a/nf_core/download.py b/nf_core/download.py index b62acfa980..6e25325940 100644 --- a/nf_core/download.py +++ b/nf_core/download.py @@ -201,11 +201,11 @@ def prompt_pipeline_name(self): if self.pipeline.count("/") == 1: gh_response = requests.get(f"https://api.github.com/repos/{self.pipeline}") try: - assert gh_response.json().get("message") == "Not Found" + assert gh_response.json().get("message") != "Not Found" except AssertionError: - pass - else: raise LookupError("Not able to find pipeline '{}'".format(self.pipeline)) + except AttributeError: + pass # When things are working we get a list, which doesn't work with .get() else: log.info( "Available nf-core pipelines: '{}'".format("', '".join([w.name for w in self.wfs.remote_workflows])) @@ -419,16 +419,24 @@ def fetch_workflow_details(self): # Get releases from GitHub API releases_url = f"https://api.github.com/repos/{self.wf_name}/releases" releases_response = requests.get(releases_url) + + # Check that this repo existed + try: + assert releases_response.json().get("message") != "Not Found" + except AssertionError: + raise LookupError(f"Not able to find pipeline '{self.pipeline}'") + except AttributeError: + pass # When things are working we get a list, which doesn't work with .get() 
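# (the GitHub releases endpoint returns a JSON *list* on success, so .get()
# raises AttributeError and the check above falls through; a missing repo
# returns an object like {"message": "Not Found"}, which trips the assert)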
self.wf_releases = list( sorted(releases_response.json(), key=lambda k: k.get("published_at_timestamp", 0), reverse=True) ) else: - log.error("Not able to find pipeline '{}'".format(self.pipeline)) + log.error(f"Not able to find pipeline '{self.pipeline}'") log.info( "Available nf-core pipelines: '{}'".format("', '".join([w.name for w in self.wfs.remote_workflows])) ) - raise LookupError("Not able to find pipeline '{}'".format(self.pipeline)) + raise LookupError(f"Not able to find pipeline '{self.pipeline}'") # Get branch information from github api branches_url = f"https://api.github.com/repos/{self.wf_name}/branches" diff --git a/tests/test_download.py b/tests/test_download.py index 12fcfdfdfc..0ac72feded 100644 --- a/tests/test_download.py +++ b/tests/test_download.py @@ -20,70 +20,37 @@ class DownloadTest(unittest.TestCase): # # Tests for 'fetch_workflow_details()' # - @mock.patch("nf_core.list.RemoteWorkflow") - @mock.patch("nf_core.list.Workflows") - def test_fetch_workflow_details_for_release(self, mock_workflows, mock_workflow): - download_obj = DownloadWorkflow(pipeline="dummy", release="1.0.0") - mock_workflow.name = "dummy" - mock_workflow.releases = [{"tag_name": "1.0.0", "tag_sha": "n3v3rl4nd"}] - mock_workflows.remote_workflows = [mock_workflow] - - download_obj.fetch_workflow_details(mock_workflows) - - @mock.patch("nf_core.list.RemoteWorkflow") - @mock.patch("nf_core.list.Workflows") - def test_fetch_workflow_details_for_dev_version(self, mock_workflows, mock_workflow): - download_obj = DownloadWorkflow(pipeline="dummy") - mock_workflow.name = "dummy" - mock_workflow.releases = [] - mock_workflows.remote_workflows = [mock_workflow] - - download_obj.fetch_workflow_details(mock_workflows) - - @mock.patch("nf_core.list.RemoteWorkflow") - @mock.patch("nf_core.list.Workflows") - def test_fetch_workflow_details_and_autoset_release(self, mock_workflows, mock_workflow): - download_obj = DownloadWorkflow(pipeline="dummy") - mock_workflow.name = "dummy" - mock_workflow.releases = [{"tag_name": "1.0.0", "tag_sha": "n3v3rl4nd"}] - mock_workflows.remote_workflows = [mock_workflow] - - download_obj.fetch_workflow_details(mock_workflows) - assert download_obj.release == "1.0.0" + def test_fetch_workflow_details_for_nf_core(self): + download_obj = DownloadWorkflow(pipeline="methylseq") + download_obj.fetch_workflow_details() + assert download_obj.wf_name == "nf-core/methylseq" + for r in download_obj.wf_releases: + if r.get("tag_name") == "1.6": + break + else: + raise AssertionError("Release 1.6 not found") + assert "dev" in download_obj.wf_branches.keys() + + def test_fetch_workflow_details_for_not_nf_core(self): + download_obj = DownloadWorkflow(pipeline="ewels/MultiQC") + download_obj.fetch_workflow_details() + assert download_obj.wf_name == "ewels/MultiQC" + for r in download_obj.wf_releases: + if r.get("tag_name") == "v1.10": + break + else: + raise AssertionError("MultiQC release v1.10 not found") + assert "master" in download_obj.wf_branches.keys() - @mock.patch("nf_core.list.RemoteWorkflow") - @mock.patch("nf_core.list.Workflows") @pytest.mark.xfail(raises=LookupError, strict=True) - def test_fetch_workflow_details_for_unknown_release(self, mock_workflows, mock_workflow): - download_obj = DownloadWorkflow(pipeline="dummy", release="1.2.0") - mock_workflow.name = "dummy" - mock_workflow.releases = [{"tag_name": "1.0.0", "tag_sha": "n3v3rl4nd"}] - mock_workflows.remote_workflows = [mock_workflow] - - download_obj.fetch_workflow_details(mock_workflows) + def 
test_fetch_workflow_details_not_exists(self): + download_obj = DownloadWorkflow(pipeline="made_up_pipeline") + download_obj.fetch_workflow_details() - @mock.patch("nf_core.list.Workflows") - def test_fetch_workflow_details_for_github_ressource(self, mock_workflows): - download_obj = DownloadWorkflow(pipeline="myorg/dummy", release="1.2.0") - mock_workflows.remote_workflows = [] - - download_obj.fetch_workflow_details(mock_workflows) - - @mock.patch("nf_core.list.Workflows") - def test_fetch_workflow_details_for_github_ressource_take_master(self, mock_workflows): - download_obj = DownloadWorkflow(pipeline="myorg/dummy") - mock_workflows.remote_workflows = [] - - download_obj.fetch_workflow_details(mock_workflows) - assert download_obj.release == "master" - - @mock.patch("nf_core.list.Workflows") @pytest.mark.xfail(raises=LookupError, strict=True) - def test_fetch_workflow_details_no_search_result(self, mock_workflows): - download_obj = DownloadWorkflow(pipeline="http://my-server.org/dummy", release="1.2.0") - mock_workflows.remote_workflows = [] - - download_obj.fetch_workflow_details(mock_workflows) + def test_fetch_workflow_details_not_exists_slash(self): + download_obj = DownloadWorkflow(pipeline="made-up/pipeline") + download_obj.fetch_workflow_details() # # Tests for 'download_wf_files' @@ -190,7 +157,8 @@ def test_singularity_pull_image(self, mock_rich_progress): # Tests for the main entry method 'download_workflow' # @mock.patch("nf_core.download.DownloadWorkflow.singularity_pull_image") - def test_download_workflow_with_success(self, mock_download_image): + @mock.patch("shutil.which") + def test_download_workflow_with_success(self, mock_download_image, mock_singularity_installed): tmp_dir = tempfile.mkdtemp() @@ -198,8 +166,8 @@ def test_download_workflow_with_success(self, mock_download_image): pipeline="nf-core/methylseq", outdir=os.path.join(tmp_dir, "new"), container="singularity", - release="dev", - compress="none", + release="1.6", + compress_type="none", ) download_obj.download_workflow() From bd8152becd9a982d5aab2098af93059b0a368e82 Mon Sep 17 00:00:00 2001 From: Phil Ewels Date: Sat, 1 May 2021 01:17:18 +0200 Subject: [PATCH 164/210] Don't force interactive terminal on GitHub Actions for pytest --- tests/test_download.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/tests/test_download.py b/tests/test_download.py index 0ac72feded..db90db60f4 100644 --- a/tests/test_download.py +++ b/tests/test_download.py @@ -161,6 +161,10 @@ def test_singularity_pull_image(self, mock_rich_progress): def test_download_workflow_with_success(self, mock_download_image, mock_singularity_installed): tmp_dir = tempfile.mkdtemp() + try: + del os.environ["GITHUB_ACTIONS"] + except KeyError: + pass download_obj = DownloadWorkflow( pipeline="nf-core/methylseq", From 3c837b21f061a8467854534143eadf590d041edc Mon Sep 17 00:00:00 2001 From: Phil Ewels Date: Sat, 1 May 2021 01:19:45 +0200 Subject: [PATCH 165/210] Run pytests with Python 3.9 too --- .github/workflows/create-lint-wf.yml | 4 ++-- .github/workflows/pytest.yml | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/create-lint-wf.yml b/.github/workflows/create-lint-wf.yml index cd87a22c4e..3432a8f9f4 100644 --- a/.github/workflows/create-lint-wf.yml +++ b/.github/workflows/create-lint-wf.yml @@ -10,10 +10,10 @@ jobs: - uses: actions/checkout@v2 name: Check out source-code repository - - name: Set up Python 3.7 + - name: Set up Python 3.8 uses: actions/setup-python@v1 with: - python-version: 3.7 + 
python-version: 3.8 - name: Install python dependencies run: | diff --git a/.github/workflows/pytest.yml b/.github/workflows/pytest.yml index b31b62e8b5..b9c6381423 100644 --- a/.github/workflows/pytest.yml +++ b/.github/workflows/pytest.yml @@ -16,7 +16,7 @@ jobs: NXF_VER: 21.03.0-edge strategy: matrix: - python-version: [3.6, 3.7, 3.8] + python-version: [3.6, 3.7, 3.8, 3.9] steps: - uses: actions/checkout@v2 From 9bbdbe3fac7d9aa65a13a0186d60ded53cd6cb6f Mon Sep 17 00:00:00 2001 From: Phil Ewels Date: Sat, 1 May 2021 01:22:13 +0200 Subject: [PATCH 166/210] Different approach to try to avoid interactive prompt in tests --- tests/test_download.py | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/tests/test_download.py b/tests/test_download.py index db90db60f4..0009a19744 100644 --- a/tests/test_download.py +++ b/tests/test_download.py @@ -161,10 +161,7 @@ def test_singularity_pull_image(self, mock_rich_progress): def test_download_workflow_with_success(self, mock_download_image, mock_singularity_installed): tmp_dir = tempfile.mkdtemp() - try: - del os.environ["GITHUB_ACTIONS"] - except KeyError: - pass + os.environ["NXF_SINGULARITY_CACHEDIR"] = "foo" download_obj = DownloadWorkflow( pipeline="nf-core/methylseq", From 67ec08bca1d045419f8d3b0156c36399f85018f7 Mon Sep 17 00:00:00 2001 From: Phil Ewels Date: Sat, 1 May 2021 22:38:22 +0200 Subject: [PATCH 167/210] Refactor: Move bunch of prompt / lookup code into utils for reuse --- nf_core/download.py | 142 +++++++------------------------------------ nf_core/utils.py | 145 ++++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 166 insertions(+), 121 deletions(-) diff --git a/nf_core/download.py b/nf_core/download.py index 6e25325940..970d3fca76 100644 --- a/nf_core/download.py +++ b/nf_core/download.py @@ -97,7 +97,6 @@ def __init__( self.singularity_cache_only = singularity_cache_only self.parallel_downloads = parallel_downloads - self.wf_name = None self.wf_releases = {} self.wf_branches = {} self.wf_sha = None @@ -115,14 +114,14 @@ def download_workflow(self): # Get workflow details try: self.prompt_pipeline_name() - self.fetch_workflow_details() + self.wf_releases, self.wf_branches = nf_core.utils.get_repo_releases_branches(self.pipeline, self.wfs) self.prompt_release() self.get_release_hash() self.prompt_container_download() self.prompt_use_singularity_cachedir() self.prompt_singularity_cachedir_only() self.prompt_compression_type() - except LookupError as e: + except AssertionError as e: log.critical(e) sys.exit(1) @@ -165,7 +164,12 @@ def download_workflow(self): # Download the centralised configs log.info("Downloading centralised configs from GitHub") self.download_configs() - self.wf_use_local_configs() + try: + self.wf_use_local_configs() + except FileNotFoundError as e: + log.error("Error editing pipeline config file to use local configs!") + log.critical(e) + sys.exit(1) # Download the singularity images if self.container == "singularity": @@ -186,54 +190,13 @@ def prompt_pipeline_name(self): if self.pipeline is None: stderr.print("Specify the name of a nf-core pipeline or a GitHub repository name (user/repo).") - self.pipeline = questionary.autocomplete( - "Pipeline name:", - choices=[wf.name for wf in self.wfs.remote_workflows], - style=nf_core.utils.nfcore_question_style, - ).unsafe_ask() - - # Fast-fail for unrecognised pipelines (we check again at the end) - for wf in self.wfs.remote_workflows: - if wf.full_name == self.pipeline or wf.name == self.pipeline: - break - else: - # Non nf-core GitHub repo 
- if self.pipeline.count("/") == 1: - gh_response = requests.get(f"https://api.github.com/repos/{self.pipeline}") - try: - assert gh_response.json().get("message") != "Not Found" - except AssertionError: - raise LookupError("Not able to find pipeline '{}'".format(self.pipeline)) - except AttributeError: - pass # When things are working we get a list, which doesn't work with .get() - else: - log.info( - "Available nf-core pipelines: '{}'".format("', '".join([w.name for w in self.wfs.remote_workflows])) - ) - raise LookupError("Not able to find pipeline '{}'".format(self.pipeline)) + self.pipeline = nf_core.utils.prompt_remote_pipeline_name(self.wfs) def prompt_release(self): """Prompt for pipeline release / branch""" # Prompt user for release tag if '--release' was not set if self.release is None: - choices = [] - - # Releases - if len(self.wf_releases) > 0: - for tag in map(lambda release: release.get("tag_name"), self.wf_releases): - tag_display = [("fg:ansiblue", f"{tag} "), ("class:choice-default", "[release]")] - choices.append(questionary.Choice(title=tag_display, value=tag)) - - # Branches - for branch in self.wf_branches.keys(): - branch_display = [("fg:ansiyellow", f"{branch} "), ("class:choice-default", "[branch]")] - choices.append(questionary.Choice(title=branch_display, value=branch)) - - if len(choices) > 0: - stderr.print("\nChoose the release or branch that should be downloaded.") - self.release = questionary.select( - "Select release / branch:", choices=choices, style=nf_core.utils.nfcore_question_style - ).unsafe_ask() + self.release = nf_core.utils.prompt_pipeline_release_branch(self.wf_releases, self.wf_branches) def get_release_hash(self): """Find specified release / branch hash""" @@ -245,27 +208,28 @@ def get_release_hash(self): # Release else: for r in self.wf_releases: - if r["tag_name"] == self.release.lstrip("v"): + if r["tag_name"] == self.release: self.wf_sha = r["tag_sha"] break # Can't find the release or branch - throw an error else: - log.error("Not able to find release '{}' for {}".format(self.release, self.wf_name)) log.info( - "Available {} releases: {}".format( - self.wf_name, ", ".join([r["tag_name"] for r in self.wf_releases]) + "Available {} releases: '{}'".format( + self.pipeline, "', '".join([r["tag_name"] for r in self.wf_releases]) ) ) - log.info("Available {} branches: '{}'".format(self.wf_name, "', '".join(self.wf_branches.keys()))) - raise LookupError("Not able to find release / branch '{}' for {}".format(self.release, self.wf_name)) + log.info("Available {} branches: '{}'".format(self.pipeline, "', '".join(self.wf_branches.keys()))) + raise AssertionError( + "Not able to find release / branch '{}' for {}".format(self.release, self.pipeline) + ) # Set the outdir if not self.outdir: - self.outdir = "{}-{}".format(self.wf_name.replace("/", "-").lower(), self.release) + self.outdir = "{}-{}".format(self.pipeline.replace("/", "-").lower(), self.release) # Set the download URL and return - self.wf_download_url = "https://github.com/{}/archive/{}.zip".format(self.wf_name, self.wf_sha) + self.wf_download_url = "https://github.com/{}/archive/{}.zip".format(self.pipeline, self.wf_sha) def prompt_container_download(self): """Prompt whether to download container images or not""" @@ -358,7 +322,7 @@ def prompt_singularity_cachedir_only(self): # Sanity check, for when passed as a cli flag if self.singularity_cache_only and self.container != "singularity": - raise LookupError("Command has '--singularity-cache-only' set, but '--container' is not 
'singularity'") + raise AssertionError("Command has '--singularity-cache-only' set, but '--container' is not 'singularity'") def prompt_compression_type(self): """Ask user if we should compress the downloaded files""" @@ -385,70 +349,6 @@ def prompt_compression_type(self): if self.compress_type == "none": self.compress_type = None - def fetch_workflow_details(self): - """Fetches details of a nf-core workflow to download. - - Raises: - LockupError, if the pipeline can not be found. - """ - - # Get workflow download details - for wf in self.wfs.remote_workflows: - if wf.full_name == self.pipeline or wf.name == self.pipeline: - - # Set pipeline name - self.wf_name = wf.full_name - - # Store releases - self.wf_releases = list( - sorted(wf.releases, key=lambda k: k.get("published_at_timestamp", 0), reverse=True) - ) - - break - - # Must not be a nf-core pipeline - else: - if self.pipeline.count("/") == 1: - - # Looks like a GitHub address - try working with this repo - self.wf_name = self.pipeline - log.info( - f"Pipeline '{self.wf_name}' not in nf-core, but looks like a GitHub address - attempting anyway" - ) - - # Get releases from GitHub API - releases_url = f"https://api.github.com/repos/{self.wf_name}/releases" - releases_response = requests.get(releases_url) - - # Check that this repo existed - try: - assert releases_response.json().get("message") != "Not Found" - except AssertionError: - raise LookupError(f"Not able to find pipeline '{self.pipeline}'") - except AttributeError: - pass # When things are working we get a list, which doesn't work with .get() - self.wf_releases = list( - sorted(releases_response.json(), key=lambda k: k.get("published_at_timestamp", 0), reverse=True) - ) - - else: - log.error(f"Not able to find pipeline '{self.pipeline}'") - log.info( - "Available nf-core pipelines: '{}'".format("', '".join([w.name for w in self.wfs.remote_workflows])) - ) - raise LookupError(f"Not able to find pipeline '{self.pipeline}'") - - # Get branch information from github api - branches_url = f"https://api.github.com/repos/{self.wf_name}/branches" - branch_response = requests.get(branches_url) - for branch in branch_response.json(): - if ( - branch["name"] != "TEMPLATE" - and branch["name"] != "initial_commit" - and not branch["name"].startswith("nf-core-template-merge") - ): - self.wf_branches[branch["name"]] = branch["commit"]["sha"] - def download_wf_files(self): """Downloads workflow files from GitHub to the :attr:`self.outdir`.""" log.debug("Downloading {}".format(self.wf_download_url)) @@ -459,7 +359,7 @@ def download_wf_files(self): zipfile.extractall(self.outdir) # Rename the internal directory name to be more friendly - gh_name = "{}-{}".format(self.wf_name, self.wf_sha).split("/")[-1] + gh_name = "{}-{}".format(self.pipeline, self.wf_sha).split("/")[-1] os.rename(os.path.join(self.outdir, gh_name), os.path.join(self.outdir, "workflow")) # Make downloaded files executable diff --git a/nf_core/utils.py b/nf_core/utils.py index 2670f0310d..0cd3e24d78 100644 --- a/nf_core/utils.py +++ b/nf_core/utils.py @@ -13,6 +13,7 @@ import logging import os import prompt_toolkit +import questionary import re import requests import requests_cache @@ -550,3 +551,147 @@ def write_line_break(self, data=None): CustomDumper.add_representer(dict, CustomDumper.represent_dict_preserve_order) return CustomDumper + + +def prompt_remote_pipeline_name(wfs): + """Prompt for the pipeline name with questionary + + Args: + wfs: A nf_core.list.Workflows() object, where get_remote_workflows() has been called. 
+ + Returns: + pipeline (str): GitHub repo - username/repo + + Raises: + AssertionError, if pipeline cannot be found + """ + + pipeline = questionary.autocomplete( + "Pipeline name:", + choices=[wf.name for wf in wfs.remote_workflows], + style=nfcore_question_style, + ).unsafe_ask() + + # Check nf-core repos + for wf in wfs.remote_workflows: + if wf.full_name == pipeline or wf.name == pipeline: + return wf.full_name + + # Non nf-core repo on GitHub + else: + if pipeline.count("/") == 1: + try: + gh_response = requests.get(f"https://api.github.com/repos/{pipeline}") + assert gh_response.json().get("message") != "Not Found" + except AssertionError: + pass + else: + return pipeline + + log.info("Available nf-core pipelines: '{}'".format("', '".join([w.name for w in wfs.remote_workflows]))) + raise AssertionError(f"Not able to find pipeline '{pipeline}'") + + +def prompt_pipeline_release_branch(wf_releases, wf_branches): + """Prompt for pipeline release / branch + + Args: + wf_releases (array): Array of repo releases as returned by the GitHub API + wf_branches (array): Array of repo branches, as returned by the GitHub API + + Returns: + choice (str): Selected release / branch name + """ + # Prompt user for release tag + choices = [] + + # Releases + if len(wf_releases) > 0: + for tag in map(lambda release: release.get("tag_name"), wf_releases): + tag_display = [("fg:ansiblue", f"{tag} "), ("class:choice-default", "[release]")] + choices.append(questionary.Choice(title=tag_display, value=tag)) + + # Branches + for branch in wf_branches.keys(): + branch_display = [("fg:ansiyellow", f"{branch} "), ("class:choice-default", "[branch]")] + choices.append(questionary.Choice(title=branch_display, value=branch)) + + if len(choices) == 0: + return False + + return questionary.select("Select release / branch:", choices=choices, style=nfcore_question_style).unsafe_ask() + + +def get_repo_releases_branches(pipeline, wfs): + """Fetches details of a nf-core workflow to download. + + Args: + pipeline (str): GitHub repo username/repo + wfs: A nf_core.list.Workflows() object, where get_remote_workflows() has been called. + + Returns: + wf_releases, wf_branches (tuple): Array of releases, Array of branches + + Raises: + AssertionError, if the pipeline cannot be found.
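+ + Example (illustrative usage): + wf_releases, wf_branches = get_repo_releases_branches("nf-core/rnaseq", wfs)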
+ """ + + wf_releases = [] + wf_branches = {} + + # Repo is a nf-core pipeline + for wf in wfs.remote_workflows: + if wf.full_name == pipeline or wf.name == pipeline: + + # Set to full name just in case it didn't have the nf-core/ prefix + pipeline = wf.full_name + + # Store releases and stop loop + wf_releases = list(sorted(wf.releases, key=lambda k: k.get("published_at_timestamp", 0), reverse=True)) + break + + # Arbitrary GitHub repo + else: + if pipeline.count("/") == 1: + + # Looks like a GitHub address - try working with this repo + log.debug( + f"Pipeline '{pipeline}' not in nf-core, but looks like a GitHub address - fetching releases from API" + ) + + # Get releases from GitHub API + rel_r = requests.get(f"https://api.github.com/repos/{pipeline}/releases") + + # Check that this repo existed + try: + assert rel_r.json().get("message") != "Not Found" + except AssertionError: + raise AssertionError(f"Not able to find pipeline '{pipeline}'") + except AttributeError: + # When things are working we get a list, which doesn't work with .get() + wf_releases = list(sorted(rel_r.json(), key=lambda k: k.get("published_at_timestamp", 0), reverse=True)) + + # Get release tag commit hashes + if len(wf_releases) > 0: + # Get commit hash information for each release + tags_r = requests.get(f"https://api.github.com/repos/{pipeline}/tags") + for tag in tags_r.json(): + for release in wf_releases: + if tag["name"] == release["tag_name"]: + release["tag_sha"] = tag["commit"]["sha"] + + else: + log.info("Available nf-core pipelines: '{}'".format("', '".join([w.name for w in wfs.remote_workflows]))) + raise AssertionError(f"Not able to find pipeline '{pipeline}'") + + # Get branch information from github api - should be no need to check if the repo exists again + branch_response = requests.get(f"https://api.github.com/repos/{pipeline}/branches") + for branch in branch_response.json(): + if ( + branch["name"] != "TEMPLATE" + and branch["name"] != "initial_commit" + and not branch["name"].startswith("nf-core-template-merge") + ): + wf_branches[branch["name"]] = branch["commit"]["sha"] + + return wf_releases, wf_branches From ed9dd62f99432448d2a5b37993f2b55b8687d658 Mon Sep 17 00:00:00 2001 From: Phil Ewels Date: Sat, 1 May 2021 23:03:18 +0200 Subject: [PATCH 168/210] Launch: Prompt for pipeline and release --- nf_core/download.py | 5 +-- nf_core/launch.py | 90 ++++++++++++++++++++------------------------- 2 files changed, 40 insertions(+), 55 deletions(-) diff --git a/nf_core/download.py b/nf_core/download.py index 970d3fca76..591fcad29c 100644 --- a/nf_core/download.py +++ b/nf_core/download.py @@ -238,10 +238,7 @@ def prompt_container_download(self): stderr.print("\nIn addition to the pipeline code, this tool can download software containers.") self.container = questionary.select( "Download software container images:", - choices=[ - "none", - "singularity", - ], + choices=["none", "singularity"], style=nf_core.utils.nfcore_question_style, ).unsafe_ask() diff --git a/nf_core/launch.py b/nf_core/launch.py index 216037477b..8547554ca5 100644 --- a/nf_core/launch.py +++ b/nf_core/launch.py @@ -57,7 +57,11 @@ def __init__( if self.web_id: self.web_schema_launch_web_url = "{}?id={}".format(self.web_schema_launch_url, web_id) self.web_schema_launch_api_url = "{}?id={}&api=true".format(self.web_schema_launch_url, web_id) - self.nextflow_cmd = "nextflow run {}".format(self.pipeline) + self.nextflow_cmd = None + + # Fetch remote workflows + self.wfs = nf_core.list.Workflows() + self.wfs.get_remote_workflows() # 
Prepend property names with a single hyphen in case we have parameters with the same ID self.nxf_flag_schema = { @@ -94,12 +98,24 @@ def __init__( def launch_pipeline(self): - # Check that we have everything we need + # Prompt for pipeline if not supplied and no web launch ID if self.pipeline is None and self.web_id is None: - log.error( - "Either a pipeline name or web cache ID is required. Please see nf-core launch --help for more information." - ) - return False + launch_type = questionary.select( + "Launch local pipeline or remote GitHub pipeline?", + choices=["Remote pipeline", "Local path"], + style=nf_core.utils.nfcore_question_style, + ).unsafe_ask() + + if launch_type == "Remote pipeline": + try: + self.pipeline = nf_core.utils.prompt_remote_pipeline_name(self.wfs) + except AssertionError as e: + log.error(e.args[0]) + return False + else: + self.pipeline = questionary.path( + "Path to workflow:", style=nf_core.utils.nfcore_question_style + ).unsafe_ask() # Check if the output file exists already if os.path.exists(self.params_out): @@ -111,7 +127,9 @@ def launch_pipeline(self): log.info("Exiting. Use --params-out to specify a custom filename.") return False - log.info("This tool ignores any pipeline parameter defaults overwritten by Nextflow config files or profiles\n") + log.info( + "NOTE: This tool ignores any pipeline parameter defaults overwritten by Nextflow config files or profiles\n" + ) # Check if we have a web ID if self.web_id is not None: @@ -170,29 +188,25 @@ def get_pipeline_schema(self): self.schema_obj = nf_core.schema.PipelineSchema() # Check if this is a local directory - if os.path.exists(self.pipeline): + localpath = os.path.abspath(os.path.expanduser(self.pipeline)) + if os.path.exists(localpath): # Set the nextflow launch command to use full paths - self.nextflow_cmd = "nextflow run {}".format(os.path.abspath(self.pipeline)) + self.pipeline = localpath + self.nextflow_cmd = f"nextflow run {localpath}" else: # Assume nf-core if no org given if self.pipeline.count("/") == 0: - self.nextflow_cmd = "nextflow run nf-core/{}".format(self.pipeline) + self.pipeline = f"nf-core/{self.pipeline}" + self.nextflow_cmd = "nextflow run {}".format(self.pipeline) if not self.pipeline_revision: - check_for_releases = Confirm.ask("Would you like to select a specific release?") - if check_for_releases: - try: - release_tags = self.try_fetch_release_tags() - self.pipeline_revision = questionary.select( - "Please select a release:", - choices=release_tags, - style=nf_core.utils.nfcore_question_style, - ).unsafe_ask() - except LookupError: - pass - - # Add revision flag to commands if set - if self.pipeline_revision: + try: + wf_releases, wf_branches = nf_core.utils.get_repo_releases_branches(self.pipeline, self.wfs) + except AssertionError as e: + log.error(e) + return False + + self.pipeline_revision = nf_core.utils.prompt_pipeline_release_branch(wf_releases, wf_branches) self.nextflow_cmd += " -r {}".format(self.pipeline_revision) # Get schema from name, load it and lint it @@ -208,7 +222,7 @@ def get_pipeline_schema(self): if not os.path.exists(os.path.join(self.schema_obj.pipeline_dir, "nextflow.config")) and not os.path.exists( os.path.join(self.schema_obj.pipeline_dir, "main.nf") ): - log.error("Could not find a main.nf or nextfow.config file, are you sure this is a pipeline?") + log.error("Could not find a 'main.nf' or 'nextflow.config' file, are you sure this is a pipeline?") return False # Build a schema for this pipeline @@ -223,32 +237,6 @@ def get_pipeline_schema(self): 
log.error("Could not build pipeline schema: {}".format(e)) return False - def try_fetch_release_tags(self): - """Tries to fetch tag names of pipeline releases from github - - Returns: - release_tags (list[str]): Returns list of release tags - - Raises: - LookupError, if no releases were found - """ - # Fetch releases from github api - releases_url = "https://api.github.com/repos/nf-core/{}/releases".format(self.pipeline) - response = requests.get(releases_url) - if not response.ok: - log.error(f"Unable to find any release tags for {self.pipeline}. Will try to continue launch.") - raise LookupError - - # Filter out the release tags and sort them - release_tags = map(lambda release: release.get("tag_name", None), response.json()) - release_tags = filter(lambda tag: tag != None, release_tags) - release_tags = list(release_tags) - if len(release_tags) == 0: - log.error(f"Unable to find any release tags for {self.pipeline}. Will try to continue launch.") - raise LookupError - release_tags = sorted(release_tags, key=lambda tag: tag.get("published_at_timestamp", 0), reverse=True) - return release_tags - def set_schema_inputs(self): """ Take the loaded schema and set the defaults as the input parameters From bf7ec67bbc7465525c4f502c31b9c0c2a73676d6 Mon Sep 17 00:00:00 2001 From: kevinmenden Date: Mon, 3 May 2021 13:33:54 +0200 Subject: [PATCH 169/210] added check for parameter description --- nf_core/lint/__init__.py | 2 ++ nf_core/lint/schema_description.py | 41 ++++++++++++++++++++++++++++++ nf_core/lint/schema_params.py | 3 ++- 3 files changed, 45 insertions(+), 1 deletion(-) create mode 100644 nf_core/lint/schema_description.py diff --git a/nf_core/lint/__init__.py b/nf_core/lint/__init__.py index 82c4d57ddd..b553cf5b7a 100644 --- a/nf_core/lint/__init__.py +++ b/nf_core/lint/__init__.py @@ -113,6 +113,7 @@ class PipelineLint(nf_core.utils.Pipeline): from .readme import readme from .schema_lint import schema_lint from .schema_params import schema_params + from .schema_description import schema_description from .template_strings import template_strings from .version_consistency import version_consistency @@ -147,6 +148,7 @@ def __init__(self, wf_path, release_mode=False, fix=(), key=(), fail_ignored=Fal "template_strings", "schema_lint", "schema_params", + "schema_description", "actions_schema_validation", "merge_markers", ] diff --git a/nf_core/lint/schema_description.py b/nf_core/lint/schema_description.py new file mode 100644 index 0000000000..b46cfeec70 --- /dev/null +++ b/nf_core/lint/schema_description.py @@ -0,0 +1,41 @@ +#!/usr/bin/env python + +from logging import warn +import nf_core.schema + + +def schema_description(self): + """Check that the schema describes all flat params in the pipeline. + + The ``nextflow_schema.json`` pipeline schema should describe every flat parameter + returned from the ``nextflow config`` command (params that are objects or more complex structures are ignored). 
+ + * Failure: If parameters are found in ``nextflow_schema.json`` that are not in ``nextflow_schema.json`` + * Warning: If parameters are found in ``nextflow_schema.json`` that are not in ``nextflow_schema.json`` + """ + passed = [] + warned = [] + failed = [] + + # First, get the top-level config options for the pipeline + # Schema object already created in the `schema_lint` test + self.schema_obj = nf_core.schema.PipelineSchema() + self.schema_obj.get_schema_path(self.wf_path) + self.schema_obj.get_wf_params() + self.schema_obj.no_prompts = True + self.schema_obj.load_lint_schema() + + # Get ungrouped params + ungrouped_params = self.schema_obj.schema["properties"].keys() + for up in ungrouped_params: + warned.append(f"Ungrouped param in schema {up}") + + # Iterate over groups and add warning for parameters without a description + for group_key in self.schema_obj.schema["definitions"].keys(): + group = self.schema_obj.schema["definitions"][group_key] + for param_key in group["properties"].keys(): + param = group["properties"][param_key] + if not "description" in param.keys(): + warned.append(f"No description provided in schema for parameter '{param_key}'") + + return {"passed": passed, "warned": warned, "failed": failed} diff --git a/nf_core/lint/schema_params.py b/nf_core/lint/schema_params.py index 580e9129d8..20c962c226 100644 --- a/nf_core/lint/schema_params.py +++ b/nf_core/lint/schema_params.py @@ -17,10 +17,11 @@ def schema_params(self): failed = [] # First, get the top-level config options for the pipeline - # Schema object already created in the `schema_lint` test + self.schema_obj = nf_core.schema.PipelineSchema() self.schema_obj.get_schema_path(self.wf_path) self.schema_obj.get_wf_params() self.schema_obj.no_prompts = True + self.schema_obj.load_lint_schema() # Remove any schema params not found in the config removed_params = self.schema_obj.remove_schema_notfound_configs() From 74b8f10baf610899bd52b7a5ee46e9500b2e1917 Mon Sep 17 00:00:00 2001 From: kevinmenden Date: Mon, 3 May 2021 13:37:27 +0200 Subject: [PATCH 170/210] update changelog --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index ada9702873..6635774a02 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -16,6 +16,7 @@ * Ignore permission errors for setting up requests cache directories to allow starting with an invalid or read-only HOME directory * New lint test to check if params in `nextflow config` are mentioned in `main.nf` [[#1038](https://github.com/nf-core/tools/issues/1038)] * New modules lint test comparing the `functions.nf` file to the template version +* Added lint checks for missing parameter description and parameters outside of groups [[#1017](https://github.com/nf-core/tools/issues/1017)] ### Template From d4c9b9e15d17199e005b4bc3d6e4475e008a40b3 Mon Sep 17 00:00:00 2001 From: kevinmenden Date: Mon, 3 May 2021 14:19:38 +0200 Subject: [PATCH 171/210] fix camelCase params warnings --- nf_core/pipeline-template/lib/NfcoreSchema.groovy | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/nf_core/pipeline-template/lib/NfcoreSchema.groovy b/nf_core/pipeline-template/lib/NfcoreSchema.groovy index d591b434c3..52ee730432 100644 --- a/nf_core/pipeline-template/lib/NfcoreSchema.groovy +++ b/nf_core/pipeline-template/lib/NfcoreSchema.groovy @@ -112,8 +112,14 @@ class NfcoreSchema { } // unexpected params def params_ignore = params.schema_ignore_params.split(',') + 'schema_ignore_params' - if (!expectedParams.contains(specifiedParam) && 
!params_ignore.contains(specifiedParam)) { - unexpectedParams.push(specifiedParam) + def expectedParamsLowerCase = expectedParams.collect{ it.replace("-", "").toLowerCase() } + def specifiedParamLowerCase = specifiedParam.replace("-", "").toLowerCase() + if (!expectedParams.contains(specifiedParam) && !params_ignore.contains(specifiedParam) && !expectedParamsLowerCase.contains(specifiedParamLowerCase)) { + // Temporarily remove camelCase/camel-case params #1035 + def unexpectedParamsLowerCase = unexpectedParams.collect{ it.replace("-", "").toLowerCase()} + if (!unexpectedParamsLowerCase.contains(specifiedParamLowerCase)){ + unexpectedParams.push(specifiedParam) + } } } From d59b7dbad58c3775c82843b7c59dcaf7ae724d7b Mon Sep 17 00:00:00 2001 From: kevinmenden Date: Mon, 3 May 2021 14:21:41 +0200 Subject: [PATCH 172/210] update changelog --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index ada9702873..d11f81b4d3 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -16,6 +16,7 @@ * Ignore permission errors for setting up requests cache directories to allow starting with an invalid or read-only HOME directory * New lint test to check if params in `nextflow config` are mentioned in `main.nf` [[#1038](https://github.com/nf-core/tools/issues/1038)] * New modules lint test comparing the `functions.nf` file to the template version +* Added temporary fix to remove warnings about params that get converted from camelCase to camel-case [[#1035](https://github.com/nf-core/tools/issues/1035)] ### Template From 4ed2c31b12e7ec2dd317c54ed66a20c29a76b6ce Mon Sep 17 00:00:00 2001 From: kevinmenden Date: Mon, 3 May 2021 14:31:09 +0200 Subject: [PATCH 173/210] fixed missing things/tests --- .../pipeline_lint_tests/schema_description.rst | 4 ++++ nf_core/lint/schema_description.py | 15 ++++++++------- 2 files changed, 12 insertions(+), 7 deletions(-) create mode 100644 docs/api/_src/pipeline_lint_tests/schema_description.rst diff --git a/docs/api/_src/pipeline_lint_tests/schema_description.rst b/docs/api/_src/pipeline_lint_tests/schema_description.rst new file mode 100644 index 0000000000..8733e203e5 --- /dev/null +++ b/docs/api/_src/pipeline_lint_tests/schema_description.rst @@ -0,0 +1,4 @@ +schema_description +================== + +.. automethod:: nf_core.lint.PipelineLint.schema_description diff --git a/nf_core/lint/schema_description.py b/nf_core/lint/schema_description.py index b46cfeec70..19b4a1b242 100644 --- a/nf_core/lint/schema_description.py +++ b/nf_core/lint/schema_description.py @@ -5,13 +5,13 @@ def schema_description(self): - """Check that the schema describes all flat params in the pipeline. + """Check that every parameter in the schema has a description The ``nextflow_schema.json`` pipeline schema should describe every flat parameter - returned from the ``nextflow config`` command (params that are objects or more complex structures are ignored).
+ Furthermore warns about parameters outside of groups - * Failure: If parameters are found in ``nextflow_schema.json`` that are not in ``nextflow_schema.json`` - * Warning: If parameters are found in ``nextflow_schema.json`` that are not in ``nextflow_schema.json`` + * Warning: Parameters in ``nextflow_schema.json`` without a description + * Warning: Parameters in ``nextflow_schema.json`` that are defined outside of a group """ passed = [] warned = [] @@ -26,9 +26,10 @@ def schema_description(self): self.schema_obj.load_lint_schema() # Get ungrouped params - ungrouped_params = self.schema_obj.schema["properties"].keys() - for up in ungrouped_params: - warned.append(f"Ungrouped param in schema {up}") + if "properties" in self.schema_obj.schema.keys(): + ungrouped_params = self.schema_obj.schema["properties"].keys() + for up in ungrouped_params: + warned.append(f"Ungrouped param in schema {up}") # Iterate over groups and add warning for parameters without a description for group_key in self.schema_obj.schema["definitions"].keys(): From 5178790a6017b932b3babb1a0c616a0b31213f12 Mon Sep 17 00:00:00 2001 From: Kevin Menden Date: Tue, 4 May 2021 08:19:59 +0200 Subject: [PATCH 174/210] Apply suggestions from code review Co-authored-by: Phil Ewels --- nf_core/lint/schema_description.py | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/nf_core/lint/schema_description.py b/nf_core/lint/schema_description.py index 19b4a1b242..0e517e5c71 100644 --- a/nf_core/lint/schema_description.py +++ b/nf_core/lint/schema_description.py @@ -29,14 +29,13 @@ def schema_description(self): if "properties" in self.schema_obj.schema.keys(): ungrouped_params = self.schema_obj.schema["properties"].keys() for up in ungrouped_params: - warned.append(f"Ungrouped param in schema {up}") + warned.append(f"Ungrouped param in schema: `{up}`") # Iterate over groups and add warning for parameters without a description for group_key in self.schema_obj.schema["definitions"].keys(): group = self.schema_obj.schema["definitions"][group_key] - for param_key in group["properties"].keys(): - param = group["properties"][param_key] - if not "description" in param.keys(): - warned.append(f"No description provided in schema for parameter '{param_key}'") + for param_key, param in group["properties"].items(): + if "description" not in param.keys(): + warned.append(f"No description provided in schema for parameter: `{param_key}`") return {"passed": passed, "warned": warned, "failed": failed} From 2dff23ba19deb7407740843e028da0869ae07888 Mon Sep 17 00:00:00 2001 From: kevinmenden Date: Tue, 4 May 2021 09:05:13 +0200 Subject: [PATCH 175/210] add support for ignoring params --- nf_core/lint/schema_description.py | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/nf_core/lint/schema_description.py b/nf_core/lint/schema_description.py index 0e517e5c71..429a77a0c9 100644 --- a/nf_core/lint/schema_description.py +++ b/nf_core/lint/schema_description.py @@ -16,6 +16,7 @@ def schema_description(self): passed = [] warned = [] failed = [] + ignored = [] # First, get the top-level config options for the pipeline # Schema object already created in the `schema_lint` test @@ -25,17 +26,27 @@ def schema_description(self): self.schema_obj.no_prompts = True self.schema_obj.load_lint_schema() + # Get parameters that should be ignored according to the linting config + ignore_params = self.lint_config.get("schema_description", []) + # Get ungrouped params if "properties" in self.schema_obj.schema.keys(): ungrouped_params = 
self.schema_obj.schema["properties"].keys() for up in ungrouped_params: + if up in ignore_params: + continue warned.append(f"Ungrouped param in schema: `{up}`") # Iterate over groups and add warning for parameters without a description for group_key in self.schema_obj.schema["definitions"].keys(): group = self.schema_obj.schema["definitions"][group_key] for param_key, param in group["properties"].items(): + if param_key in ignore_params: + continue if "description" not in param.keys(): warned.append(f"No description provided in schema for parameter: `{param_key}`") + for ip in ignore_params: + ignored.append(f"Parameter is ignored: `{ip}`") + return {"passed": passed, "warned": warned, "failed": failed} From 3a639f80b42be1d51c9805b00289bbeb1074ad13 Mon Sep 17 00:00:00 2001 From: Phil Ewels Date: Tue, 4 May 2021 22:04:17 +0200 Subject: [PATCH 176/210] Remove special nxf 21.03.0-edge versions Now that the stable 21.04.0 Nextflow release is out, we can revert to using the latest stable version of Nextflow for CI tests etc. --- .github/workflows/create-lint-wf.yml | 2 -- .github/workflows/pytest.yml | 2 -- .github/workflows/sync.yml | 3 --- nf_core/bump_version.py | 4 ++-- nf_core/pipeline-template/.github/workflows/ci.yml | 2 +- 5 files changed, 3 insertions(+), 10 deletions(-) diff --git a/.github/workflows/create-lint-wf.yml b/.github/workflows/create-lint-wf.yml index 3432a8f9f4..7b26731994 100644 --- a/.github/workflows/create-lint-wf.yml +++ b/.github/workflows/create-lint-wf.yml @@ -4,8 +4,6 @@ on: [push, pull_request] jobs: MakeTestWorkflow: runs-on: ubuntu-latest - env: - NXF_VER: 21.03.0-edge steps: - uses: actions/checkout@v2 name: Check out source-code repository diff --git a/.github/workflows/pytest.yml b/.github/workflows/pytest.yml index b9c6381423..0033c894bd 100644 --- a/.github/workflows/pytest.yml +++ b/.github/workflows/pytest.yml @@ -12,8 +12,6 @@ on: jobs: pytest: runs-on: ubuntu-latest - env: - NXF_VER: 21.03.0-edge strategy: matrix: python-version: [3.6, 3.7, 3.8, 3.9] diff --git a/.github/workflows/sync.yml b/.github/workflows/sync.yml index d958fbee08..6d5bd24c6b 100644 --- a/.github/workflows/sync.yml +++ b/.github/workflows/sync.yml @@ -4,9 +4,6 @@ on: types: [published] workflow_dispatch: -env: - NXF_VER: 21.03.0-edge - jobs: get-pipelines: runs-on: ubuntu-latest diff --git a/nf_core/bump_version.py b/nf_core/bump_version.py index b770cb2e65..28e3f9eeaa 100644 --- a/nf_core/bump_version.py +++ b/nf_core/bump_version.py @@ -145,8 +145,8 @@ def bump_nextflow_version(pipeline_obj, new_version): pipeline_obj, [ ( - r"nxf_ver: \[[\'\"]?{}[\'\"]?, '21.03.0-edge'\]".format(current_version.replace(".", r"\.")), - "nxf_ver: ['{}', '21.03.0-edge']".format(new_version), + r"nxf_ver: \[[\'\"]?{}[\'\"]?, ''\]".format(current_version.replace(".", r"\.")), + "nxf_ver: ['{}', '']".format(new_version), ) ], ) diff --git a/nf_core/pipeline-template/.github/workflows/ci.yml b/nf_core/pipeline-template/.github/workflows/ci.yml index 2f387022ee..0228228801 100644 --- a/nf_core/pipeline-template/.github/workflows/ci.yml +++ b/nf_core/pipeline-template/.github/workflows/ci.yml @@ -20,7 +20,7 @@ jobs: strategy: matrix: # Nextflow versions: check pipeline minimum and current latest - nxf_ver: ['20.04.0', '21.03.0-edge'] + nxf_ver: ['20.04.0', ''] steps: - name: Check out pipeline code uses: actions/checkout@v2 From 360eb0e66486a6a7d581f9591e6ee19649db0046 Mon Sep 17 00:00:00 2001 From: Phil Ewels Date: Tue, 4 May 2021 22:06:46 +0200 Subject: [PATCH 177/210] Changelog --- CHANGELOG.md | 1 + 
1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 8104d5e1dc..9af7a9c2b0 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -17,6 +17,7 @@ * Merge markers lint test - ignore binary files, allow config to ignore specific files [[#1040](https://github.com/nf-core/tools/pull/1040)] * New lint test to check if params in `nextflow config` are mentioned in `main.nf` [[#1038](https://github.com/nf-core/tools/issues/1038)] * New modules lint test comparing the `functions.nf` file to the template version +* Use latest stable Nextflow version `21.04.0` for CI tests instead of the `-edge` release ### Template From 535e46bfdde3687aee101beabf900f1358f3f8a4 Mon Sep 17 00:00:00 2001 From: Phil Ewels Date: Wed, 5 May 2021 11:35:29 +0200 Subject: [PATCH 178/210] Add back NXF_EDGE env var in workflows, commented out --- .github/workflows/create-lint-wf.yml | 3 +++ .github/workflows/pytest.yml | 3 +++ .github/workflows/sync.yml | 3 +++ nf_core/pipeline-template/.github/workflows/ci.yml | 3 +++ 4 files changed, 12 insertions(+) diff --git a/.github/workflows/create-lint-wf.yml b/.github/workflows/create-lint-wf.yml index 7b26731994..68691263b7 100644 --- a/.github/workflows/create-lint-wf.yml +++ b/.github/workflows/create-lint-wf.yml @@ -1,6 +1,9 @@ name: Create a pipeline and lint it on: [push, pull_request] +# Uncomment if we need an edge release of Nextflow again +# env: NXF_EDGE: 1 + jobs: MakeTestWorkflow: runs-on: ubuntu-latest diff --git a/.github/workflows/pytest.yml b/.github/workflows/pytest.yml index 0033c894bd..263a4f5659 100644 --- a/.github/workflows/pytest.yml +++ b/.github/workflows/pytest.yml @@ -9,6 +9,9 @@ on: paths: - "**.py" +# Uncomment if we need an edge release of Nextflow again +# env: NXF_EDGE: 1 + jobs: pytest: runs-on: ubuntu-latest diff --git a/.github/workflows/sync.yml b/.github/workflows/sync.yml index 6d5bd24c6b..a0beb23636 100644 --- a/.github/workflows/sync.yml +++ b/.github/workflows/sync.yml @@ -4,6 +4,9 @@ on: types: [published] workflow_dispatch: +# Uncomment if we need an edge release of Nextflow again +# env: NXF_EDGE: 1 + jobs: get-pipelines: runs-on: ubuntu-latest diff --git a/nf_core/pipeline-template/.github/workflows/ci.yml b/nf_core/pipeline-template/.github/workflows/ci.yml index 0228228801..2c9c12aa51 100644 --- a/nf_core/pipeline-template/.github/workflows/ci.yml +++ b/nf_core/pipeline-template/.github/workflows/ci.yml @@ -8,6 +8,9 @@ on: release: types: [published] +# Uncomment if we need an edge release of Nextflow again +# env: NXF_EDGE: 1 + jobs: test: name: Run workflow tests From 4f75b6a9e669ad3625ce51120205628447037883 Mon Sep 17 00:00:00 2001 From: Kevin Menden Date: Wed, 5 May 2021 13:55:20 +0200 Subject: [PATCH 179/210] Apply suggestions from code review Co-authored-by: Phil Ewels --- nf_core/lint/schema_description.py | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/nf_core/lint/schema_description.py b/nf_core/lint/schema_description.py index 429a77a0c9..f1377053e2 100644 --- a/nf_core/lint/schema_description.py +++ b/nf_core/lint/schema_description.py @@ -15,7 +15,6 @@ def schema_description(self): """ passed = [] warned = [] - failed = [] ignored = [] # First, get the top-level config options for the pipeline @@ -34,14 +33,16 @@ def schema_description(self): ungrouped_params = self.schema_obj.schema["properties"].keys() for up in ungrouped_params: if up in ignore_params: - continue - warned.append(f"Ungrouped param in schema: `{up}`") + ignored.append(f"Ignored ungrouped param in schema: `{up}`") 
+ else: + warned.append(f"Ungrouped param in schema: `{up}`") # Iterate over groups and add warning for parameters without a description for group_key in self.schema_obj.schema["definitions"].keys(): group = self.schema_obj.schema["definitions"][group_key] for param_key, param in group["properties"].items(): if param_key in ignore_params: + ignored.append(f"Ignoring description check for param in schema: `{param_key}`") continue if "description" not in param.keys(): warned.append(f"No description provided in schema for parameter: `{param_key}`") @@ -49,4 +50,4 @@ def schema_description(self): for ip in ignore_params: ignored.append(f"Parameter is ignored: `{ip}`") - return {"passed": passed, "warned": warned, "failed": failed} + return {"passed": passed, "warned": warned, "ignored": ignored} From 2f7805d5e79a2418f5581ee8281bf99ed8c7afd0 Mon Sep 17 00:00:00 2001 From: Phil Ewels Date: Thu, 6 May 2021 00:09:56 +0200 Subject: [PATCH 180/210] Singularity download - don't add hyphen --- CHANGELOG.md | 1 + nf_core/download.py | 2 -- 2 files changed, 1 insertion(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 40f761a849..64adb69690 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -19,6 +19,7 @@ * New modules lint test comparing the `functions.nf` file to the template version * Use latest stable Nextflow version `21.04.0` for CI tests instead of the `-edge` release * Added temporary fix to remove warnings about params that get converted from camelCase to camel-case [[#1035](https://github.com/nf-core/tools/issues/1035)] +* Fix bug in `nf-core download` where image names were getting a hyphen in `nf-core` which was breaking things. ### Template diff --git a/nf_core/download.py b/nf_core/download.py index a5e0a88e6e..24e982d018 100644 --- a/nf_core/download.py +++ b/nf_core/download.py @@ -503,8 +503,6 @@ def singularity_image_filenames(self, container): out_name = out_name[:-4] # Strip : and / characters out_name = out_name.replace("/", "-").replace(":", "-") - # Stupid Docker Hub not allowing hyphens - out_name = out_name.replace("nfcore", "nf-core") # Add file extension out_name = out_name + extension From c18db089e71c7c23beb6727f37e8e8faa66de5b9 Mon Sep 17 00:00:00 2001 From: Phil Ewels Date: Thu, 6 May 2021 22:21:48 +0200 Subject: [PATCH 181/210] Don't clear entire requests_cache cache when polling the website This is better as we don't lose the cache from other calls, but we do still get fresh data from the website poll. As a side-effect, we lose the INFO log messages which closes nf-core/tools#1029 --- nf_core/utils.py | 50 ++++++++++++++++++++++++------------------------ 1 file changed, 25 insertions(+), 25 deletions(-) diff --git a/nf_core/utils.py b/nf_core/utils.py index 8fdc474ce9..313c545e5d 100644 --- a/nf_core/utils.py +++ b/nf_core/utils.py @@ -329,36 +329,36 @@ def poll_nfcore_web_api(api_url, post_data=None): Expects API reponse to be valid JSON and contain a top-level 'status' key. 
""" - # Clear requests_cache so that we get the updated statuses - requests_cache.clear() - try: - if post_data is None: - response = requests.get(api_url, headers={"Cache-Control": "no-cache"}) - else: - response = requests.post(url=api_url, data=post_data) - except (requests.exceptions.Timeout): - raise AssertionError("URL timed out: {}".format(api_url)) - except (requests.exceptions.ConnectionError): - raise AssertionError("Could not connect to URL: {}".format(api_url)) - else: - if response.status_code != 200: - log.debug("Response content:\n{}".format(response.content)) - raise AssertionError( - "Could not access remote API results: {} (HTML {} Error)".format(api_url, response.status_code) - ) + # Run without requests_cache so that we get the updated statuses + with requests_cache.disabled(): + try: + if post_data is None: + response = requests.get(api_url, headers={"Cache-Control": "no-cache"}) + else: + response = requests.post(url=api_url, data=post_data) + except (requests.exceptions.Timeout): + raise AssertionError("URL timed out: {}".format(api_url)) + except (requests.exceptions.ConnectionError): + raise AssertionError("Could not connect to URL: {}".format(api_url)) else: - try: - web_response = json.loads(response.content) - assert "status" in web_response - except (json.decoder.JSONDecodeError, AssertionError, TypeError) as e: + if response.status_code != 200: log.debug("Response content:\n{}".format(response.content)) raise AssertionError( - "nf-core website API results response not recognised: {}\n See verbose log for full response".format( - api_url - ) + "Could not access remote API results: {} (HTML {} Error)".format(api_url, response.status_code) ) else: - return web_response + try: + web_response = json.loads(response.content) + assert "status" in web_response + except (json.decoder.JSONDecodeError, AssertionError, TypeError) as e: + log.debug("Response content:\n{}".format(response.content)) + raise AssertionError( + "nf-core website API results response not recognised: {}\n See verbose log for full response".format( + api_url + ) + ) + else: + return web_response def anaconda_package(dep, dep_channels=["conda-forge", "bioconda", "defaults"]): From 82b2d5bb37f7f6a3901ea2103482a54edc43820a Mon Sep 17 00:00:00 2001 From: Phil Ewels Date: Thu, 6 May 2021 22:24:09 +0200 Subject: [PATCH 182/210] Changelog --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index a250b2d626..3391a1596a 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -21,6 +21,7 @@ * Added fix to remove warnings about params that get converted from camelCase to camel-case [[#1035](https://github.com/nf-core/tools/issues/1035)] * Use latest stable Nextflow version `21.04.0` for CI tests instead of the `-edge` release * Fix bug in `nf-core download` where image names were getting a hyphen in `nf-core` which was breaking things. 
+* Improve API caching code when polling the website, fixes noisy log message when waiting for a response [[#1029](https://github.com/nf-core/tools/issues/1029)] ### Template From 3d74a90483f0ebe870346a10df88d00c9297551e Mon Sep 17 00:00:00 2001 From: Phil Ewels Date: Thu, 6 May 2021 22:25:12 +0200 Subject: [PATCH 183/210] Black update - docstring style --- nf_core/utils.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nf_core/utils.py b/nf_core/utils.py index 313c545e5d..8fa57b655a 100644 --- a/nf_core/utils.py +++ b/nf_core/utils.py @@ -554,7 +554,7 @@ def write_line_break(self, data=None): def is_file_binary(path): - """ Check file path to see if it is a binary file """ + """Check file path to see if it is a binary file""" binary_ftypes = ["image", "application/java-archive", "application/x-java-archive"] binary_extensions = [".jpeg", ".jpg", ".png", ".zip", ".gz", ".jar", ".tar"] From 8d4fc0e3400100e36c4a439035fa6f9b4a07c6d0 Mon Sep 17 00:00:00 2001 From: Phil Ewels Date: Thu, 6 May 2021 22:58:29 +0200 Subject: [PATCH 184/210] nf_core.utils.get_repo_releases_branches() - return pipeline name. Sometimes we add the nf-core/ prefix to the pipeline name, so return that too. --- nf_core/download.py | 4 +++- nf_core/launch.py | 4 +++- nf_core/utils.py | 3 ++- 3 files changed, 8 insertions(+), 3 deletions(-) diff --git a/nf_core/download.py b/nf_core/download.py index 591fcad29c..78c6666102 100644 --- a/nf_core/download.py +++ b/nf_core/download.py @@ -114,7 +114,9 @@ def download_workflow(self): # Get workflow details try: self.prompt_pipeline_name() - self.wf_releases, self.wf_branches = nf_core.utils.get_repo_releases_branches(self.pipeline, self.wfs) + self.pipeline, self.wf_releases, self.wf_branches = nf_core.utils.get_repo_releases_branches( + self.pipeline, self.wfs + ) self.prompt_release() self.get_release_hash() self.prompt_container_download() diff --git a/nf_core/launch.py b/nf_core/launch.py index 8547554ca5..5570015987 100644 --- a/nf_core/launch.py +++ b/nf_core/launch.py @@ -201,7 +201,9 @@ def get_pipeline_schema(self): if not self.pipeline_revision: try: - wf_releases, wf_branches = nf_core.utils.get_repo_releases_branches(self.pipeline, self.wfs) + self.pipeline, wf_releases, wf_branches = nf_core.utils.get_repo_releases_branches( + self.pipeline, self.wfs + ) except AssertionError as e: log.error(e) return False diff --git a/nf_core/utils.py b/nf_core/utils.py index 7724cedb15..f40683cbd2 100644 --- a/nf_core/utils.py +++ b/nf_core/utils.py @@ -711,4 +711,5 @@ def get_repo_releases_branches(pipeline, wfs): ): wf_branches[branch["name"]] = branch["commit"]["sha"] - return wf_releases, wf_branches + # Return pipeline again in case we added the nf-core/ prefix + return pipeline, wf_releases, wf_branches From 5478af857e1d3e1172a1401f40c084106feb3047 Mon Sep 17 00:00:00 2001 From: Phil Ewels Date: Thu, 6 May 2021 23:04:38 +0200 Subject: [PATCH 185/210] Update pytests --- tests/test_download.py | 101 +++++++++++++++++++++++++---------------- tests/test_utils.py | 35 ++++++++++++++ 2 files changed, 98 insertions(+), 38 deletions(-) diff --git a/tests/test_download.py b/tests/test_download.py index 0009a19744..c12bafe758 100644 --- a/tests/test_download.py +++ b/tests/test_download.py @@ -18,56 +18,81 @@ class DownloadTest(unittest.TestCase): # - # Tests for 'fetch_workflow_details()' - # - def test_fetch_workflow_details_for_nf_core(self): - download_obj = DownloadWorkflow(pipeline="methylseq") - download_obj.fetch_workflow_details() - assert 
download_obj.wf_name == "nf-core/methylseq" - for r in download_obj.wf_releases: - if r.get("tag_name") == "1.6": - break - else: - raise AssertionError("Release 1.6 not found") - assert "dev" in download_obj.wf_branches.keys() - - def test_fetch_workflow_details_for_not_nf_core(self): - download_obj = DownloadWorkflow(pipeline="ewels/MultiQC") - download_obj.fetch_workflow_details() - assert download_obj.wf_name == "ewels/MultiQC" - for r in download_obj.wf_releases: - if r.get("tag_name") == "v1.10": - break - else: - raise AssertionError("MultiQC release v1.10 not found") - assert "master" in download_obj.wf_branches.keys() - - @pytest.mark.xfail(raises=LookupError, strict=True) - def test_fetch_workflow_details_not_exists(self): - download_obj = DownloadWorkflow(pipeline="made_up_pipeline") - download_obj.fetch_workflow_details() - - @pytest.mark.xfail(raises=LookupError, strict=True) - def test_fetch_workflow_details_not_exists_slash(self): - download_obj = DownloadWorkflow(pipeline="made-up/pipeline") - download_obj.fetch_workflow_details() + # Tests for 'get_release_hash' + # + def test_get_release_hash_release(self): + wfs = nf_core.list.Workflows() + wfs.get_remote_workflows() + pipeline = "methylseq" + download_obj = DownloadWorkflow(pipeline=pipeline, release="1.6") + ( + download_obj.pipeline, + download_obj.wf_releases, + download_obj.wf_branches, + ) = nf_core.utils.get_repo_releases_branches(pipeline, wfs) + download_obj.get_release_hash() + assert download_obj.wf_sha == "b3e5e3b95aaf01d98391a62a10a3990c0a4de395" + assert download_obj.outdir == "nf-core-methylseq-1.6" + assert ( + download_obj.wf_download_url + == "https://github.com/nf-core/methylseq/archive/b3e5e3b95aaf01d98391a62a10a3990c0a4de395.zip" + ) + + def test_get_release_hash_branch(self): + wfs = nf_core.list.Workflows() + wfs.get_remote_workflows() + # Exoseq pipeline is archived, so `dev` branch should be stable + pipeline = "exoseq" + download_obj = DownloadWorkflow(pipeline=pipeline, release="dev") + ( + download_obj.pipeline, + download_obj.wf_releases, + download_obj.wf_branches, + ) = nf_core.utils.get_repo_releases_branches(pipeline, wfs) + download_obj.get_release_hash() + assert download_obj.wf_sha == "819cbac792b76cf66c840b567ed0ee9a2f620db7" + assert download_obj.outdir == "nf-core-exoseq-dev" + assert ( + download_obj.wf_download_url + == "https://github.com/nf-core/exoseq/archive/819cbac792b76cf66c840b567ed0ee9a2f620db7.zip" + ) + + @pytest.mark.xfail(raises=AssertionError, strict=True) + def test_get_release_hash_non_existent_release(self): + wfs = nf_core.list.Workflows() + wfs.get_remote_workflows() + pipeline = "methylseq" + download_obj = DownloadWorkflow(pipeline=pipeline, release="thisisfake") + ( + download_obj.pipeline, + download_obj.wf_releases, + download_obj.wf_branches, + ) = nf_core.utils.get_repo_releases_branches(pipeline, wfs) + download_obj.get_release_hash() # # Tests for 'download_wf_files' # def test_download_wf_files(self): - download_obj = DownloadWorkflow(pipeline="dummy", release="1.2.0", outdir=tempfile.mkdtemp()) - download_obj.wf_name = "nf-core/methylseq" - download_obj.wf_sha = "1.0" - download_obj.wf_download_url = "https://github.com/nf-core/methylseq/archive/1.0.zip" + outdir = tempfile.mkdtemp() + download_obj = DownloadWorkflow(pipeline="nf-core/methylseq", release="1.6") + download_obj.outdir = outdir + download_obj.wf_sha = "b3e5e3b95aaf01d98391a62a10a3990c0a4de395" + download_obj.wf_download_url = ( + 
"https://github.com/nf-core/methylseq/archive/b3e5e3b95aaf01d98391a62a10a3990c0a4de395.zip" + ) download_obj.download_wf_files() + assert os.path.exists(os.path.join(outdir, "workflow", "main.nf")) # # Tests for 'download_configs' # def test_download_configs(self): - download_obj = DownloadWorkflow(pipeline="dummy", release="1.2.0", outdir=tempfile.mkdtemp()) + outdir = tempfile.mkdtemp() + download_obj = DownloadWorkflow(pipeline="nf-core/methylseq", release="1.6") + download_obj.outdir = outdir download_obj.download_configs() + assert os.path.exists(os.path.join(outdir, "configs", "nfcore_custom.config")) # # Tests for 'wf_use_local_configs' diff --git a/tests/test_utils.py b/tests/test_utils.py index c6947861c7..e016f14abe 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -3,6 +3,7 @@ """ import nf_core.create +import nf_core.list import nf_core.utils import mock @@ -132,3 +133,37 @@ def test_pip_erroneous_package(self): """Tests the PyPi API package information query""" with pytest.raises(ValueError): nf_core.utils.pip_package("not_a_package=1.0") + + def test_get_repo_releases_branches_nf_core(self): + wfs = nf_core.list.Workflows() + wfs.get_remote_workflows() + pipeline, wf_releases, wf_branches = nf_core.utils.get_repo_releases_branches("methylseq", wfs) + for r in wf_releases: + if r.get("tag_name") == "1.6": + break + else: + raise AssertionError("Release 1.6 not found") + assert "dev" in wf_branches.keys() + + def test_get_repo_releases_branches_not_nf_core(self): + wfs = nf_core.list.Workflows() + wfs.get_remote_workflows() + pipeline, wf_releases, wf_branches = nf_core.utils.get_repo_releases_branches("ewels/MultiQC", wfs) + for r in wf_releases: + if r.get("tag_name") == "v1.10": + break + else: + raise AssertionError("MultiQC release v1.10 not found") + assert "master" in wf_branches.keys() + + @pytest.mark.xfail(raises=AssertionError, strict=True) + def test_get_repo_releases_branches_not_exists(self): + wfs = nf_core.list.Workflows() + wfs.get_remote_workflows() + pipeline, wf_releases, wf_branches = nf_core.utils.get_repo_releases_branches("made_up_pipeline", wfs) + + @pytest.mark.xfail(raises=AssertionError, strict=True) + def test_get_repo_releases_branches_not_exists_slash(self): + wfs = nf_core.list.Workflows() + wfs.get_remote_workflows() + pipeline, wf_releases, wf_branches = nf_core.utils.get_repo_releases_branches("made-up/pipeline", wfs) From 73fe7f420b6058be542193350157563ff623606b Mon Sep 17 00:00:00 2001 From: Phil Ewels Date: Thu, 6 May 2021 23:43:07 +0200 Subject: [PATCH 186/210] Download - update readme docs --- README.md | 63 +++++++++++++++++++++++++++++++++++-------------------- 1 file changed, 40 insertions(+), 23 deletions(-) diff --git a/README.md b/README.md index 67f55fb6e0..670ffebd63 100644 --- a/README.md +++ b/README.md @@ -344,15 +344,14 @@ Sometimes you may need to run an nf-core pipeline on a server or HPC system that In this case you will need to fetch the pipeline files first, then manually transfer them to your system. To make this process easier and ensure accurate retrieval of correctly versioned code and software containers, we have written a download helper tool. -Simply specify the name of the nf-core pipeline and it will be downloaded to your current working directory. -By default, the pipeline will download the pipeline code and the [institutional nf-core/configs](https://github.com/nf-core/configs) files. -If you specify the flag `--singularity`, it will also download any singularity image files that are required. 
+The `nf-core download` command will download both the pipeline code and the [institutional nf-core/configs](https://github.com/nf-core/configs) files. It can also optionally download any singularity image files that are required.

-Use `-r`/`--release` to download a specific release of the pipeline. If not specified, the tool will automatically fetch the latest release.
+If run without any arguments, the download tool will interactively prompt you for the required information.
+Each option has a flag; if all are supplied, the tool will run without any user input needed.

```console
-$ nf-core download rnaseq -r 3.0 --singularity
+$ nf-core download

                                          ,--./,-.
          ___     __   __   __   ___     /,-._.--~\
    |\ | |__  __ /  ` /  \ |__) |__         }  {
    | \| |       \__, \__/ |  \ |___     \`-._,-`-,
                                          `._,._,'

-    nf-core/tools version 1.13
+    nf-core/tools version 1.14
+
+
+Specify the name of a nf-core pipeline or a GitHub repository name (user/repo).
+? Pipeline name: rnaseq
+? Select release / branch: 3.0  [release]
+In addition to the pipeline code, this tool can download software containers.
+? Download software container images: singularity
+Nextflow and nf-core can use an environment variable called $NXF_SINGULARITY_CACHEDIR that is a path to a directory where remote Singularity
+images are stored. This allows downloaded images to be cached in a central location.
+? Define $NXF_SINGULARITY_CACHEDIR for a shared Singularity image download folder? [y/n]: y
+? Specify the path: cachedir/

-INFO     Saving rnaseq
+So that $NXF_SINGULARITY_CACHEDIR is always defined, you can add it to your ~/.bashrc file. This will then be automatically set every time you open a new terminal. We can add the following line to this file for you:
+export NXF_SINGULARITY_CACHEDIR="/path/to/demo/cachedir"
+? Add to ~/.bashrc ? [y/n]: n
+
+If transferring the downloaded files to another system, it can be convenient to have everything compressed in a single file.
+This is not recommended when downloading Singularity images, as it can take a long time and saves very little space.
+? Choose compression type: none
+INFO     Saving 'nf-core/rnaseq
          Pipeline release: '3.0'
-         Pull singularity containers: 'Yes'
-         Output file: 'nf-core-rnaseq-3.0.tar.gz'
+         Pull containers: 'singularity'
+         Using $NXF_SINGULARITY_CACHEDIR': /path/to/demo/cachedir
+         Output directory: 'nf-core-rnaseq-3.0'
INFO     Downloading workflow files from GitHub
INFO     Downloading centralised configs from GitHub
-INFO     Fetching container names for workflow
INFO     Found 29 containers
-INFO     Tip: Set env var $NXF_SINGULARITY_CACHEDIR to use a central cache for container downloads
Downloading singularity images ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ 100% • 29/29 completed
INFO     Compressing download..
INFO     Command to extract files: tar -xzf nf-core-rnaseq-3.0.tar.gz
INFO     MD5 checksum for nf-core-rnaseq-3.0.tar.gz: 9789a9e0bda50f444ab0ee69cc8a95ce
```

-The tool automatically compresses all of the resulting file in to a `.tar.gz` archive.
-You can choose other formats (`.tar.bz2`, `zip`) or to not compress (`none`) with the `-c`/`--compress` flag.
-The console output provides the command you need to extract the files.
- -Once uncompressed, you will see something like the following file structure for the downloaded pipeline: +Once downloaded, you will see something like the following file structure for the downloaded pipeline: ```console -$ tree -L 2 nf-core-methylseq-1.4/ +$ tree -L 2 nf-core-rnaseq-3.0/ nf-core-rnaseq-3.0 ├── configs @@ -404,7 +416,11 @@ nf-core-rnaseq-3.0 └── main.nf ``` -You can run the pipeline by simply providing the directory path for the `workflow` folder to your `nextflow run` command. +You can run the pipeline by simply providing the directory path for the `workflow` folder to your `nextflow run` command: + +```bash +nextflow run /path/to/download/nf-core-rnaseq-3.0/workflow/ --input mydata.csv # usual parameters here +``` ### Downloaded nf-core configs @@ -414,7 +430,7 @@ So using `-profile ` should work if available within [nf-core/configs](htt ### Downloading singularity containers If you're using Singularity, the `nf-core download` command can also fetch the required Singularity container images for you. -To do this, specify the `--singularity` option. +To do this, select `singularity` in the prompt or specify `--container singularity` in the command. Your archive / target output directory will then include three folders: `workflow`, `configs` and also `singularity-containers`. The downloaded workflow files are again edited to add the following line to the end of the pipeline's `nextflow.config` file: @@ -433,7 +449,8 @@ We highly recommend setting the `$NXF_SINGULARITY_CACHEDIR` environment variable If found, the tool will fetch the Singularity images to this directory first before copying to the target output archive / directory. Any images previously fetched will be found there and copied directly - this includes images that may be shared with other pipelines or previous pipeline version downloads or download attempts. -If you are running the download on the same system where you will be running the pipeline (eg. a shared filesystem where Nextflow won't have an internet connection at a later date), you can choose specify `--singularity-cache`. +If you are running the download on the same system where you will be running the pipeline (eg. a shared filesystem where Nextflow won't have an internet connection at a later date), you can choose to _only_ use the cache via a prompt or cli options `--singularity-cache-only` / `--singularity-cache-copy`. + This instructs `nf-core download` to fetch all Singularity images to the `$NXF_SINGULARITY_CACHEDIR` directory but does _not_ copy them to the workflow archive / directory. The workflow config file is _not_ edited. This means that when you later run the workflow, Nextflow will just use the cache folder directly. @@ -451,15 +468,15 @@ Where both are found, the download URL is preferred. Once a full list of containers is found, they are processed in the following order: -1. If the target image already exists, nothing is done (eg. with `$NXF_SINGULARITY_CACHEDIR` and `--singularity-cache` specified) -2. If found in `$NXF_SINGULARITY_CACHEDIR` and `--singularity-cache` is _not_ specified, they are copied to the output directory +1. If the target image already exists, nothing is done (eg. with `$NXF_SINGULARITY_CACHEDIR` and `--singularity-cache-only` specified) +2. If found in `$NXF_SINGULARITY_CACHEDIR` and `--singularity-cache-only` is _not_ specified, they are copied to the output directory 3. 
If they start with `http` they are downloaded directly within Python (default 4 at a time, you can customise this with `--parallel-downloads`) 4. If they look like a Docker image name, they are fetched using a `singularity pull` command * This requires Singularity to be installed on the system and is substantially slower Note that compressing many GBs of binary files can be slow, so specifying `--compress none` is recommended when downloading Singularity images. -If you really like hammering your internet connection, you can set `--parallel-downloads` to a large number to download loads of images at once. +If the download speeds are much slower than your internet connection is capable of, you can set `--parallel-downloads` to a large number to download loads of images at once. ## Pipeline software licences From 205340bd24c2a5c7650cd1d70ecd7b3a82592c40 Mon Sep 17 00:00:00 2001 From: Phil Ewels Date: Thu, 6 May 2021 23:46:39 +0200 Subject: [PATCH 187/210] Download docs - didn't compress --- README.md | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/README.md b/README.md index 670ffebd63..8e7b7206bb 100644 --- a/README.md +++ b/README.md @@ -389,10 +389,7 @@ INFO Saving 'nf-core/rnaseq INFO Downloading workflow files from GitHub INFO Downloading centralised configs from GitHub INFO Found 29 containers -Downloading singularity images ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ 100% • 29/29 completed -INFO Compressing download.. -INFO Command to extract files: tar -xzf nf-core-rnaseq-3.0.tar.gz -INFO MD5 checksum for nf-core-rnaseq-3.0.tar.gz: 9789a9e0bda50f444ab0ee69cc8a95ce +Downloading singularity images ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ 100% • 29/29 completed ``` Once downloaded, you will see something like the following file structure for the downloaded pipeline: From 19f2d5c1efe19d3cfc64a9ba0a41be84718c1118 Mon Sep 17 00:00:00 2001 From: Phil Ewels Date: Fri, 7 May 2021 20:21:33 +0200 Subject: [PATCH 188/210] Address review comments for singularty cachedir prompt --- nf_core/download.py | 11 +++++------ 1 file changed, 5 insertions(+), 6 deletions(-) diff --git a/nf_core/download.py b/nf_core/download.py index 0ce7891ca7..1e567d340a 100644 --- a/nf_core/download.py +++ b/nf_core/download.py @@ -261,12 +261,11 @@ def prompt_use_singularity_cachedir(self): # Prompt user for a cache directory path cachedir_path = None while cachedir_path is None: - cachedir_path = os.path.abspath( - questionary.path( - "Specify the path:", only_directories=True, style=nf_core.utils.nfcore_question_style - ).unsafe_ask() - ) - if cachedir_path == os.path.abspath(""): + prompt_cachedir_path = questionary.path( + "Specify the path:", only_directories=True, style=nf_core.utils.nfcore_question_style + ).unsafe_ask() + cachedir_path = os.path.abspath(os.path.expanduser(prompt_cachedir_path)) + if prompt_cachedir_path == "": log.error(f"Not using [blue]$NXF_SINGULARITY_CACHEDIR[/]") cachedir_path = False elif not os.path.isdir(cachedir_path): From dd64f8c86f3ad502723b602c598f74442ca02869 Mon Sep 17 00:00:00 2001 From: Kevin Menden Date: Mon, 10 May 2021 10:29:28 +0200 Subject: [PATCH 189/210] Update CHANGELOG.md --- CHANGELOG.md | 3 --- 1 file changed, 3 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 166d53cfff..2ede3ea15f 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -17,11 +17,8 @@ * Merge markers lint test - ignore binary files, allow config to ignore specific files 
[[#1040](https://github.com/nf-core/tools/pull/1040)] * New lint test to check if params in `nextflow config` are mentioned in `main.nf` [[#1038](https://github.com/nf-core/tools/issues/1038)] * New modules lint test comparing the `functions.nf` file to the template version -<<<<<<< HEAD * Modules installed from alternative sources are put in folders based on the name of the source repository -======= * Added temporary fix to remove warnings about params that get converted from camelCase to camel-case [[#1035](https://github.com/nf-core/tools/issues/1035)] ->>>>>>> master ### Template From 35c9e3ceefaee6ab7db46c56f8b2f5ab9c67c784 Mon Sep 17 00:00:00 2001 From: kevinmenden Date: Mon, 10 May 2021 11:10:48 +0200 Subject: [PATCH 190/210] ignored maps in params_used check --- nf_core/lint/params_used.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/nf_core/lint/params_used.py b/nf_core/lint/params_used.py index e58bf04be3..e14ecc156f 100644 --- a/nf_core/lint/params_used.py +++ b/nf_core/lint/params_used.py @@ -28,6 +28,8 @@ def params_used(self): if cf in ignore_params: ignored.append("Config variable ignored: {}".format(self._wrap_quotes(cf))) continue + if cf.count(".") > 1: + continue if cf in main_nf: passed.append("Config variable found in `main.nf`: {}".format(self._wrap_quotes(cf))) else: From 078fde1861ac0969cbbcdec4a05888bfca517934 Mon Sep 17 00:00:00 2001 From: kevinmenden Date: Mon, 10 May 2021 12:07:53 +0200 Subject: [PATCH 191/210] add new version to template --- nf_core/bump_version.py | 8 +++++++- nf_core/pipeline-template/README.md | 2 +- 2 files changed, 8 insertions(+), 2 deletions(-) diff --git a/nf_core/bump_version.py b/nf_core/bump_version.py index 28e3f9eeaa..ef2068a65f 100644 --- a/nf_core/bump_version.py +++ b/nf_core/bump_version.py @@ -159,7 +159,13 @@ def bump_nextflow_version(pipeline_obj, new_version): ( r"nextflow-%E2%89%A5{}-brightgreen.svg".format(current_version.replace(".", r"\.")), "nextflow-%E2%89%A5{}-brightgreen.svg".format(new_version), - ) + ), + ( + r"1. Install [`nextflow`](https://nf-co.re/usage/installation) (`>={}`)".format( + current_version.replace(".", r"\.") + ), + "1. Install [`nextflow`](https://nf-co.re/usage/installation) (`>={}`)".format(new_version), + ), ], ) diff --git a/nf_core/pipeline-template/README.md b/nf_core/pipeline-template/README.md index 9adcfb0aac..311472a287 100644 --- a/nf_core/pipeline-template/README.md +++ b/nf_core/pipeline-template/README.md @@ -19,7 +19,7 @@ The pipeline is built using [Nextflow](https://www.nextflow.io), a workflow tool ## Quick Start -1. Install [`nextflow`](https://nf-co.re/usage/installation) +1. Install [`nextflow`](https://nf-co.re/usage/installation) (`>=21.04.0`) 2. 
Install any of [`Docker`](https://docs.docker.com/engine/installation/), [`Singularity`](https://www.sylabs.io/guides/3.0/user-guide/), [`Podman`](https://podman.io/), [`Shifter`](https://nersc.gitlab.io/development/shifter/how-to-use/) or [`Charliecloud`](https://hpc.github.io/charliecloud/) for full pipeline reproducibility _(please only use [`Conda`](https://conda.io/miniconda.html) as a last resort; see [docs](https://nf-co.re/usage/configuration#basic-configuration-profiles))_ From 7282681fe3832f8b71978265ab023f0e7323f084 Mon Sep 17 00:00:00 2001 From: kevinmenden Date: Mon, 10 May 2021 12:10:33 +0200 Subject: [PATCH 192/210] added debug message --- nf_core/lint/params_used.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/nf_core/lint/params_used.py b/nf_core/lint/params_used.py index e14ecc156f..046d20221b 100644 --- a/nf_core/lint/params_used.py +++ b/nf_core/lint/params_used.py @@ -1,6 +1,9 @@ #!/usr/bin/env python import os +import logging + +log = logging.getLogger(__name__) def params_used(self): @@ -29,6 +32,7 @@ def params_used(self): ignored.append("Config variable ignored: {}".format(self._wrap_quotes(cf))) continue if cf.count(".") > 1: + log.debug(f"Ignoring nested param: {cf}") continue if cf in main_nf: passed.append("Config variable found in `main.nf`: {}".format(self._wrap_quotes(cf))) From 92d5b1a9679aaa4beb2958c11c38d568d77c13d3 Mon Sep 17 00:00:00 2001 From: Marc Jones Date: Mon, 10 May 2021 11:22:50 +0100 Subject: [PATCH 193/210] Changed install directory to external --- nf_core/modules/pipeline_modules.py | 12 ++++++++---- tests/test_modules.py | 4 ++-- 2 files changed, 10 insertions(+), 6 deletions(-) diff --git a/nf_core/modules/pipeline_modules.py b/nf_core/modules/pipeline_modules.py index 3d9825dc8e..01d0115f30 100644 --- a/nf_core/modules/pipeline_modules.py +++ b/nf_core/modules/pipeline_modules.py @@ -219,8 +219,10 @@ def install(self, module=None): return False log.debug("Installing module '{}' at modules hash {}".format(module, self.modules_repo.modules_current_hash)) - # Extract origin repository to use as install folder - install_folder = self.modules_repo.name.split("/")[0] + # Set the install folder based on the repository name + install_folder = "nf-core" + if not self.modules_repo.name == "nf-core/modules": + install_folder = "external" # Check that we don't already have a folder for this module module_dir = os.path.join(self.pipeline_dir, "modules", install_folder, "software", module) @@ -262,8 +264,10 @@ def remove(self, module): "Tool name:", choices=self.pipeline_module_names, style=nf_core.utils.nfcore_question_style ).ask() - # Extract origin repository to use as install folder - install_folder = self.modules_repo.name.split("/")[0] + # Set the install folder based on the repository name + install_folder = "nf-core" + if not self.modules_repo.name == "nf-core/modules": + install_folder = "external" # Get the module directory module_dir = os.path.join(self.pipeline_dir, "modules", install_folder, "software", module) diff --git a/tests/test_modules.py b/tests/test_modules.py index 7ca4b0caed..6e73a43f1f 100644 --- a/tests/test_modules.py +++ b/tests/test_modules.py @@ -87,7 +87,7 @@ def test_modules_install_fastqc(self): def test_modules_install_fastqc_alternative_source(self): """Test installing a module from a different source repository - FastQC""" assert self.mods_alt.install("fastqc") is not False - module_path = os.path.join(self.mods.pipeline_dir, "modules", "ewels", "software", "fastqc") + module_path = 
os.path.join(self.mods.pipeline_dir, "modules", "external", "software", "fastqc") assert os.path.exists(module_path) def test_modules_install_fastqc_twice(self): @@ -105,7 +105,7 @@ def test_modules_remove_fastqc(self): def test_modules_remove_fastqc_alternative_source(self): """Test removing FastQC module after installing it from an alternative source""" self.mods_alt.install("fastqc") - module_path = os.path.join(self.mods.pipeline_dir, "modules", "ewels", "software", "fastqc") + module_path = os.path.join(self.mods.pipeline_dir, "modules", "external", "software", "fastqc") assert self.mods_alt.remove("fastqc") assert os.path.exists(module_path) is False From 5f8fdd6466c2e1219e402dec479332b7635ad182 Mon Sep 17 00:00:00 2001 From: Marc Jones Date: Mon, 10 May 2021 11:24:12 +0100 Subject: [PATCH 194/210] black formatting --- nf_core/utils.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nf_core/utils.py b/nf_core/utils.py index 8fdc474ce9..700d4678ae 100644 --- a/nf_core/utils.py +++ b/nf_core/utils.py @@ -554,7 +554,7 @@ def write_line_break(self, data=None): def is_file_binary(path): - """ Check file path to see if it is a binary file """ + """Check file path to see if it is a binary file""" binary_ftypes = ["image", "application/java-archive", "application/x-java-archive"] binary_extensions = [".jpeg", ".jpg", ".png", ".zip", ".gz", ".jar", ".tar"] From 0d25637d5c9d93e103849ff195c4e26c515f311d Mon Sep 17 00:00:00 2001 From: Marc Jones Date: Mon, 10 May 2021 12:41:26 +0100 Subject: [PATCH 195/210] Flattened external module directory structure --- nf_core/modules/pipeline_modules.py | 15 ++++++++------- tests/test_modules.py | 4 ++-- 2 files changed, 10 insertions(+), 9 deletions(-) diff --git a/nf_core/modules/pipeline_modules.py b/nf_core/modules/pipeline_modules.py index 01d0115f30..12f18bdb53 100644 --- a/nf_core/modules/pipeline_modules.py +++ b/nf_core/modules/pipeline_modules.py @@ -220,12 +220,12 @@ def install(self, module=None): log.debug("Installing module '{}' at modules hash {}".format(module, self.modules_repo.modules_current_hash)) # Set the install folder based on the repository name - install_folder = "nf-core" + install_folder = ["nf-core", "software"] if not self.modules_repo.name == "nf-core/modules": - install_folder = "external" + install_folder = ["external"] # Check that we don't already have a folder for this module - module_dir = os.path.join(self.pipeline_dir, "modules", install_folder, "software", module) + module_dir = os.path.join(self.pipeline_dir, "modules", *install_folder, module) if os.path.exists(module_dir): log.error("Module directory already exists: {}".format(module_dir)) # TODO: uncomment next line once update is implemented @@ -236,7 +236,8 @@ def install(self, module=None): files = self.modules_repo.get_module_file_urls(module) log.debug("Fetching module files:\n - {}".format("\n - ".join(files.keys()))) for filename, api_url in files.items(): - dl_filename = os.path.join(self.pipeline_dir, "modules", install_folder, filename) + split_filename = filename.split("/") + dl_filename = os.path.join(self.pipeline_dir, "modules", *install_folder, *split_filename[1:]) self.modules_repo.download_gh_file(dl_filename, api_url) log.info("Downloaded {} files to {}".format(len(files), module_dir)) @@ -265,12 +266,12 @@ def remove(self, module): ).ask() # Set the install folder based on the repository name - install_folder = "nf-core" + install_folder = ["nf-core", "software"] if not self.modules_repo.name == "nf-core/modules": - 
install_folder = "external" + install_folder = ["external"] # Get the module directory - module_dir = os.path.join(self.pipeline_dir, "modules", install_folder, "software", module) + module_dir = os.path.join(self.pipeline_dir, "modules", *install_folder, module) # Verify that the module is actually installed if not os.path.exists(module_dir): diff --git a/tests/test_modules.py b/tests/test_modules.py index 6e73a43f1f..8b2a2a8b10 100644 --- a/tests/test_modules.py +++ b/tests/test_modules.py @@ -87,7 +87,7 @@ def test_modules_install_fastqc(self): def test_modules_install_fastqc_alternative_source(self): """Test installing a module from a different source repository - FastQC""" assert self.mods_alt.install("fastqc") is not False - module_path = os.path.join(self.mods.pipeline_dir, "modules", "external", "software", "fastqc") + module_path = os.path.join(self.mods.pipeline_dir, "modules", "external", "fastqc") assert os.path.exists(module_path) def test_modules_install_fastqc_twice(self): @@ -105,7 +105,7 @@ def test_modules_remove_fastqc(self): def test_modules_remove_fastqc_alternative_source(self): """Test removing FastQC module after installing it from an alternative source""" self.mods_alt.install("fastqc") - module_path = os.path.join(self.mods.pipeline_dir, "modules", "external", "software", "fastqc") + module_path = os.path.join(self.mods.pipeline_dir, "modules", "external", "fastqc") assert self.mods_alt.remove("fastqc") assert os.path.exists(module_path) is False From 57abb0e6915351898e657a806554964fdb58f09c Mon Sep 17 00:00:00 2001 From: Phil Ewels Date: Mon, 10 May 2021 13:51:42 +0200 Subject: [PATCH 196/210] Add igenomes_base and igenomes_ignore to the params to ignore in params_used lint test --- nf_core/lint/params_used.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/nf_core/lint/params_used.py b/nf_core/lint/params_used.py index 046d20221b..4c864a916a 100644 --- a/nf_core/lint/params_used.py +++ b/nf_core/lint/params_used.py @@ -15,6 +15,8 @@ def params_used(self): "params.config_profile_name", "params.show_hidden_params", "params.schema_ignore_params", + "params.igenomes_base", + "params.igenomes_ignore", ] ignore_params = self.lint_config.get("params_used", []) From 60840fc46db877b3157d331f64cbf83fa8bd9e4f Mon Sep 17 00:00:00 2001 From: kevinmenden Date: Mon, 10 May 2021 14:11:05 +0200 Subject: [PATCH 197/210] fixed ci.yml nxf version --- nf_core/bump_version.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/nf_core/bump_version.py b/nf_core/bump_version.py index ef2068a65f..31aa4ed79e 100644 --- a/nf_core/bump_version.py +++ b/nf_core/bump_version.py @@ -145,7 +145,7 @@ def bump_nextflow_version(pipeline_obj, new_version): pipeline_obj, [ ( - r"nxf_ver: \[[\'\"]?{}[\'\"]?, ''\]".format(current_version.replace(".", r"\.")), + r"nxf_ver: \[[\'\"]?{}[\'\"]?, [\'\"]?[\'\"]?\]".format(current_version.replace(".", r"\.")), "nxf_ver: ['{}', '']".format(new_version), ) ], @@ -161,7 +161,7 @@ def bump_nextflow_version(pipeline_obj, new_version): "nextflow-%E2%89%A5{}-brightgreen.svg".format(new_version), ), ( - r"1. Install [`nextflow`](https://nf-co.re/usage/installation) (`>={}`)".format( + r"1.\s*Install\s*\[[\'\"]?nextflow[\'\"]?\]\(https://nf-co.re/usage/installation\)\s*\([\'\"]?>={}[\'\"]?\)".format( current_version.replace(".", r"\.") ), "1. 
Install [`nextflow`](https://nf-co.re/usage/installation) (`>={}`)".format(new_version), From 9b4f716bf62aba615922e88dc9487b97de25976a Mon Sep 17 00:00:00 2001 From: Phil Ewels Date: Mon, 10 May 2021 14:36:40 +0200 Subject: [PATCH 198/210] Bump version - refactor matching into loop --- nf_core/bump_version.py | 33 +++++++++++++++++++++++---------- 1 file changed, 23 insertions(+), 10 deletions(-) diff --git a/nf_core/bump_version.py b/nf_core/bump_version.py index 31aa4ed79e..55332563fe 100644 --- a/nf_core/bump_version.py +++ b/nf_core/bump_version.py @@ -196,18 +196,31 @@ def update_file_version(filename, pipeline_obj, patterns): replacements = [] for pattern in patterns: - # Check that we have a match - matches_pattern = re.findall("^.*{}.*$".format(pattern[0]), content, re.MULTILINE) - if len(matches_pattern) == 0: - log.error("Could not find version number in {}: '{}'".format(filename, pattern)) - continue + found_match = False + + newcontent = [] + for line in content.splitlines(): + + # Match the pattern + matches_pattern = re.findall("^.*{}.*$".format(pattern[0]), line) + if matches_pattern: + found_match = True - # Replace the match - content = re.sub(pattern[0], pattern[1], content) - matches_newstr = re.findall("^.*{}.*$".format(pattern[1]), content, re.MULTILINE) + # Replace the match + newline = re.sub(pattern[0], pattern[1], line) + newcontent.append(newline) - # Save for logging - replacements.append((matches_pattern, matches_newstr)) + # Save for logging + replacements.append((line, newline)) + + # No match, keep line as it is + else: + newcontent.append(line) + + if found_match: + content = "\n".join(newcontent) + else: + log.error("Could not find version number in {}: '{}'".format(filename, pattern)) log.info("Updated version in '{}'".format(filename)) for replacement in replacements: From ce1c2c93eab6543eef8f21dd7855dfc351f4bc8d Mon Sep 17 00:00:00 2001 From: Phil Ewels Date: Mon, 10 May 2021 14:40:40 +0200 Subject: [PATCH 199/210] Attempt to fix without testing locally --- nf_core/bump_version.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/nf_core/bump_version.py b/nf_core/bump_version.py index 55332563fe..89ff062d85 100644 --- a/nf_core/bump_version.py +++ b/nf_core/bump_version.py @@ -224,9 +224,8 @@ def update_file_version(filename, pipeline_obj, patterns): log.info("Updated version in '{}'".format(filename)) for replacement in replacements: - for idx, matched in enumerate(replacement[0]): - stderr.print(" [red] - {}".format(matched.strip()), highlight=False) - stderr.print(" [green] + {}".format(replacement[1][idx].strip()), highlight=False) + stderr.print(" [red] - {}".format(replacement[0].strip()), highlight=False) + stderr.print(" [green] + {}".format(replacement[1].strip()), highlight=False) stderr.print("\n") with open(fn, "w") as fh: From 2f7be42c6b957e5cc26934078cda382fbb8710ae Mon Sep 17 00:00:00 2001 From: kevinmenden Date: Mon, 10 May 2021 14:49:16 +0200 Subject: [PATCH 200/210] fix readme regex --- nf_core/bump_version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nf_core/bump_version.py b/nf_core/bump_version.py index 89ff062d85..d570090845 100644 --- a/nf_core/bump_version.py +++ b/nf_core/bump_version.py @@ -161,7 +161,7 @@ def bump_nextflow_version(pipeline_obj, new_version): "nextflow-%E2%89%A5{}-brightgreen.svg".format(new_version), ), ( - r"1.\s*Install\s*\[[\'\"]?nextflow[\'\"]?\]\(https://nf-co.re/usage/installation\)\s*\([\'\"]?>={}[\'\"]?\)".format( + 
r"1.\s*Install\s*\[`nextflow`\]\(https://nf-co.re/usage/installation\)\s*\(`>={}`\)".format( current_version.replace(".", r"\.") ), "1. Install [`nextflow`](https://nf-co.re/usage/installation) (`>={}`)".format(new_version), From ced197d3f818b6626b8cf692946094d94b2fb32d Mon Sep 17 00:00:00 2001 From: kevinmenden Date: Mon, 10 May 2021 14:57:32 +0200 Subject: [PATCH 201/210] updated changelog --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index a250b2d626..b6dd3642c6 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -31,6 +31,7 @@ * Fix overly strict `--max_time` formatting regex in template schema [[#973](https://github.com/nf-core/tools/issues/973)] * Added `tool_name_underscore` to the module template to allow TOOL_SUBTOOL in `main.nf` [[#1011](https://github.com/nf-core/tools/issues/1011)] * Convert `d` to `day` in the `cleanParameters` function to make Duration objects like `2d` pass the validation [[#858](https://github.com/nf-core/tools/issues/858)] +* Added nextflow version to quick start section and adjusted `nf-core bump-version` [[#1032](https://github.com/nf-core/tools/issues/1032)] ## [v1.13.3 - Copper Crocodile Resurrection :crocodile:](https://github.com/nf-core/tools/releases/tag/1.13.2) - [2021-03-24] From 2fa026512b6cf06eb029c9a9bfaac3a2b1741082 Mon Sep 17 00:00:00 2001 From: kevinmenden Date: Mon, 10 May 2021 14:57:45 +0200 Subject: [PATCH 202/210] markdown --- CHANGELOG.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index b6dd3642c6..24b6732e2d 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -31,7 +31,7 @@ * Fix overly strict `--max_time` formatting regex in template schema [[#973](https://github.com/nf-core/tools/issues/973)] * Added `tool_name_underscore` to the module template to allow TOOL_SUBTOOL in `main.nf` [[#1011](https://github.com/nf-core/tools/issues/1011)] * Convert `d` to `day` in the `cleanParameters` function to make Duration objects like `2d` pass the validation [[#858](https://github.com/nf-core/tools/issues/858)] -* Added nextflow version to quick start section and adjusted `nf-core bump-version` [[#1032](https://github.com/nf-core/tools/issues/1032)] +* Added nextflow version to quick start section and adjusted `nf-core bump-version` [[#1032](https://github.com/nf-core/tools/issues/1032)] ## [v1.13.3 - Copper Crocodile Resurrection :crocodile:](https://github.com/nf-core/tools/releases/tag/1.13.2) - [2021-03-24] From a82eaecb80ed7c2d28883e99fd8090d997e2f36c Mon Sep 17 00:00:00 2001 From: kevinmenden Date: Mon, 10 May 2021 15:10:35 +0200 Subject: [PATCH 203/210] added regex example comments --- nf_core/bump_version.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/nf_core/bump_version.py b/nf_core/bump_version.py index d570090845..2ac3fd4c0d 100644 --- a/nf_core/bump_version.py +++ b/nf_core/bump_version.py @@ -145,6 +145,7 @@ def bump_nextflow_version(pipeline_obj, new_version): pipeline_obj, [ ( + # example: nxf_ver: ['20.04.0', ''] r"nxf_ver: \[[\'\"]?{}[\'\"]?, [\'\"]?[\'\"]?\]".format(current_version.replace(".", r"\.")), "nxf_ver: ['{}', '']".format(new_version), ) @@ -161,6 +162,7 @@ def bump_nextflow_version(pipeline_obj, new_version): "nextflow-%E2%89%A5{}-brightgreen.svg".format(new_version), ), ( + # example: 1. 
Install [`nextflow`](https://nf-co.re/usage/installation) (`>=20.04.0`) r"1.\s*Install\s*\[`nextflow`\]\(https://nf-co.re/usage/installation\)\s*\(`>={}`\)".format( current_version.replace(".", r"\.") ), From 14e2e117bd7f64c6dfed46f6e259aa99f8f7cb32 Mon Sep 17 00:00:00 2001 From: kevinmenden Date: Mon, 10 May 2021 15:31:09 +0200 Subject: [PATCH 204/210] remove question marks --- nf_core/bump_version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nf_core/bump_version.py b/nf_core/bump_version.py index 2ac3fd4c0d..9a15ace594 100644 --- a/nf_core/bump_version.py +++ b/nf_core/bump_version.py @@ -146,7 +146,7 @@ def bump_nextflow_version(pipeline_obj, new_version): [ ( # example: nxf_ver: ['20.04.0', ''] - r"nxf_ver: \[[\'\"]?{}[\'\"]?, [\'\"]?[\'\"]?\]".format(current_version.replace(".", r"\.")), + r"nxf_ver: \[[\'\"]{}[\'\"], [\'\"][\'\"]\]".format(current_version.replace(".", r"\.")), "nxf_ver: ['{}', '']".format(new_version), ) ], From 91962e1d7f58d9a801334181f3ce5f71ce1fb361 Mon Sep 17 00:00:00 2001 From: Kevin Menden Date: Mon, 10 May 2021 16:04:42 +0200 Subject: [PATCH 205/210] Update nf_core/bump_version.py Co-authored-by: Phil Ewels --- nf_core/bump_version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nf_core/bump_version.py b/nf_core/bump_version.py index 9a15ace594..f566215111 100644 --- a/nf_core/bump_version.py +++ b/nf_core/bump_version.py @@ -163,7 +163,7 @@ def bump_nextflow_version(pipeline_obj, new_version): ), ( # example: 1. Install [`nextflow`](https://nf-co.re/usage/installation) (`>=20.04.0`) - r"1.\s*Install\s*\[`nextflow`\]\(https://nf-co.re/usage/installation\)\s*\(`>={}`\)".format( + r"1.\s*Install\s*\[`nextflow`\]\(https://nf-co\.re/usage/installation\)\s*\(`>={}`\)".format( current_version.replace(".", r"\.") ), "1. Install [`nextflow`](https://nf-co.re/usage/installation) (`>={}`)".format(new_version), From 26b4d613ba7d56b8ec3d447a57a24fea596eca48 Mon Sep 17 00:00:00 2001 From: kevinmenden Date: Mon, 10 May 2021 16:05:10 +0200 Subject: [PATCH 206/210] escape dot --- nf_core/bump_version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nf_core/bump_version.py b/nf_core/bump_version.py index f566215111..70ac2312c2 100644 --- a/nf_core/bump_version.py +++ b/nf_core/bump_version.py @@ -163,7 +163,7 @@ def bump_nextflow_version(pipeline_obj, new_version): ), ( # example: 1. Install [`nextflow`](https://nf-co.re/usage/installation) (`>=20.04.0`) - r"1.\s*Install\s*\[`nextflow`\]\(https://nf-co\.re/usage/installation\)\s*\(`>={}`\)".format( + r"1\.\s*Install\s*\[`nextflow`\]\(https://nf-co\.re/usage/installation\)\s*\(`>={}`\)".format( current_version.replace(".", r"\.") ), "1. 
Install [`nextflow`](https://nf-co.re/usage/installation) (`>={}`)".format(new_version),

From cad39818785665f93ad100e830e0cd8f0c56142d Mon Sep 17 00:00:00 2001
From: Phil Ewels
Date: Mon, 10 May 2021 17:00:22 +0200
Subject: [PATCH 207/210] Bump to v1.14, reorganise changelog

---
 CHANGELOG.md | 61 ++++++++++++++++++++++++++++++++--------------------
 setup.py     |  2 +-
 2 files changed, 39 insertions(+), 24 deletions(-)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index fd77d9463e..f8b72fb213 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,40 +1,55 @@
 # nf-core/tools: Changelog

-## 1.14dev
+## [v1.14 - Brass Chicken :chicken:](https://github.com/nf-core/tools/releases/tag/1.14) - [2021-05-11]

-### Tools
+### Template

-* Strip values from `nf-core launch` web response which are False and have no default in the schema [[#976](https://github.com/nf-core/tools/issues/976)]
-* Try to fix the fix for the automated sync when we submit too many PRs at once [[#970](https://github.com/nf-core/tools/issues/970)]
+* Fixed an issue regarding explicit disabling of unused container engines [[#972](https://github.com/nf-core/tools/pull/972)]
+* Removed trailing slash from `params.igenomes_base` to yield valid s3 paths (previous paths work with Nextflow but not aws cli)
+* Added a timestamp to the trace + timeline + report + dag filenames to fix overwrite issue on AWS
+* Rewrite the `params_summary_log()` function to properly ignore unset params and have nicer formatting [[#971](https://github.com/nf-core/tools/issues/971)]
+* Fix overly strict `--max_time` formatting regex in template schema [[#973](https://github.com/nf-core/tools/issues/973)]
+* Convert `d` to `day` in the `cleanParameters` function to make Duration objects like `2d` pass the validation [[#858](https://github.com/nf-core/tools/issues/858)]
+* Added nextflow version to quick start section and adjusted `nf-core bump-version` [[#1032](https://github.com/nf-core/tools/issues/1032)]
+* Use latest stable Nextflow version `21.04.0` for CI tests instead of the `-edge` release
+
+### Download
+
+* Fix bug in `nf-core download` where image names were getting a hyphen in `nf-core` which was breaking things.
+* Extensive new interactive prompts for all command line flags [[#1027](https://github.com/nf-core/tools/issues/1027)] + * It is now recommended to run `nf-core download` without any cli options and follow prompts (though flags can be used to run non-interactively if you wish) +* New helper code to set `$NXF_SINGULARITY_CACHEDIR` and add to `.bashrc` if desired [[#1027](https://github.com/nf-core/tools/issues/1027)] + +### Launch + +* Strip values from `nf-core launch` web response which are `False` and have no default in the schema [[#976](https://github.com/nf-core/tools/issues/976)] +* Improve API caching code when polling the website, fixes noisy log message when waiting for a response [[#1029](https://github.com/nf-core/tools/issues/1029)] +* New interactive prompts for pipeline name [[#1027](https://github.com/nf-core/tools/issues/1027)] + +### Modules + +* Added `tool_name_underscore` to the module template to allow TOOL_SUBTOOL in `main.nf` [[#1011](https://github.com/nf-core/tools/issues/1011)] * Added `--conda-name` flag to `nf-core modules create` command to allow sidestepping questionary [[#988](https://github.com/nf-core/tools/issues/988)] * Extended `nf-core modules lint` functionality to check tags in `test.yml` and to look for a entry in the `pytest_software.yml` file * Update `modules` commands to use new test tag format `tool/subtool` -* Rewrite how the tools documentation is deployed to the website, to allow multiple versions -* Created new Docker image for the tools cli package - see installation docs for details [[#917](https://github.com/nf-core/tools/issues/917)] +* New modules lint test comparing the `functions.nf` file to the template version +* Modules installed from alternative sources are put in folders based on the name of the source repository + +### Linting + * Fix bug in nf-core lint config skipping for the `nextflow_config` test [[#1019](https://github.com/nf-core/tools/issues/1019)] * New `-k`/`--key` cli option for `nf-core lint` to allow you to run only named lint tests, for faster local debugging -* Ignore permission errors for setting up requests cache directories to allow starting with an invalid or read-only HOME directory * Merge markers lint test - ignore binary files, allow config to ignore specific files [[#1040](https://github.com/nf-core/tools/pull/1040)] * New lint test to check if params in `nextflow config` are mentioned in `main.nf` [[#1038](https://github.com/nf-core/tools/issues/1038)] -* New modules lint test comparing the `functions.nf` file to the template version -* Modules installed from alternative sources are put in folders based on the name of the source repository -* Added temporary fix to remove warnings about params that get converted from camelCase to camel-case [[#1035](https://github.com/nf-core/tools/issues/1035)] -* Added lint checks for missing parameter description and parameters outside of groups [[#1017](https://github.com/nf-core/tools/issues/1017)] * Added fix to remove warnings about params that get converted from camelCase to camel-case [[#1035](https://github.com/nf-core/tools/issues/1035)] -* Use latest stable Nextflow version `21.04.0` for CI tests instead of the `-edge` release -* Fix bug in `nf-core download` where image names were getting a hyphen in `nf-core` which was breaking things. 
-* Improve API caching code when polling the website, fixes noisy log message when waiting for a response [[#1029](https://github.com/nf-core/tools/issues/1029)] +* Added pipeline schema lint checks for missing parameter description and parameters outside of groups [[#1017](https://github.com/nf-core/tools/issues/1017)] -### Template +### General -* Fixed an issue regarding explicit disabling of unused container engines [[#972](https://github.com/nf-core/tools/pull/972)] -* Removed trailing slash from `params.igenomes_base` to yield valid s3 paths (previous paths work with Nextflow but not aws cli) -* Added a timestamp to the trace + timetime + report + dag filenames to fix overwrite issue on AWS -* Rewrite the `params_summary_log()` function to properly ignore unset params and have nicer formatting [[#971](https://github.com/nf-core/tools/issues/971)] -* Fix overly strict `--max_time` formatting regex in template schema [[#973](https://github.com/nf-core/tools/issues/973)] -* Added `tool_name_underscore` to the module template to allow TOOL_SUBTOOL in `main.nf` [[#1011](https://github.com/nf-core/tools/issues/1011)] -* Convert `d` to `day` in the `cleanParameters` function to make Duration objects like `2d` pass the validation [[#858](https://github.com/nf-core/tools/issues/858)] -* Added nextflow version to quick start section and adjusted `nf-core bump-version` [[#1032](https://github.com/nf-core/tools/issues/1032)] +* Try to fix the fix for the automated sync when we submit too many PRs at once [[#970](https://github.com/nf-core/tools/issues/970)] +* Rewrite how the tools documentation is deployed to the website, to allow multiple versions +* Created new Docker image for the tools cli package - see installation docs for details [[#917](https://github.com/nf-core/tools/issues/917)] +* Ignore permission errors for setting up requests cache directories to allow starting with an invalid or read-only `HOME` directory ## [v1.13.3 - Copper Crocodile Resurrection :crocodile:](https://github.com/nf-core/tools/releases/tag/1.13.2) - [2021-03-24] diff --git a/setup.py b/setup.py index e18f437f03..8960df4de0 100644 --- a/setup.py +++ b/setup.py @@ -2,7 +2,7 @@ from setuptools import setup, find_packages -version = "1.14dev" +version = "1.14" with open("README.md") as f: readme = f.read() From 3096adc397ef0a9d2c45f47497c5cfe66a497367 Mon Sep 17 00:00:00 2001 From: Phil Ewels Date: Tue, 11 May 2021 11:52:30 +0200 Subject: [PATCH 208/210] Fix review comments from @maxulysse in nf-core/tools#1066 --- CHANGELOG.md | 2 +- nf_core/pipeline-template/README.md | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index f8b72fb213..36ba54e3a7 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -40,7 +40,7 @@ * Fix bug in nf-core lint config skipping for the `nextflow_config` test [[#1019](https://github.com/nf-core/tools/issues/1019)] * New `-k`/`--key` cli option for `nf-core lint` to allow you to run only named lint tests, for faster local debugging * Merge markers lint test - ignore binary files, allow config to ignore specific files [[#1040](https://github.com/nf-core/tools/pull/1040)] -* New lint test to check if params in `nextflow config` are mentioned in `main.nf` [[#1038](https://github.com/nf-core/tools/issues/1038)] +* New lint test to check if all defined pipeline parameters are mentioned in `main.nf` [[#1038](https://github.com/nf-core/tools/issues/1038)] * Added fix to remove warnings about params that get converted from camelCase to camel-case 
[[#1035](https://github.com/nf-core/tools/issues/1035)] * Added pipeline schema lint checks for missing parameter description and parameters outside of groups [[#1017](https://github.com/nf-core/tools/issues/1017)] diff --git a/nf_core/pipeline-template/README.md b/nf_core/pipeline-template/README.md index 311472a287..87c48a2c30 100644 --- a/nf_core/pipeline-template/README.md +++ b/nf_core/pipeline-template/README.md @@ -19,7 +19,7 @@ The pipeline is built using [Nextflow](https://www.nextflow.io), a workflow tool ## Quick Start -1. Install [`nextflow`](https://nf-co.re/usage/installation) (`>=21.04.0`) +1. Install [`nextflow`](https://nf-co.re/usage/installation) (`>=20.04.0`) 2. Install any of [`Docker`](https://docs.docker.com/engine/installation/), [`Singularity`](https://www.sylabs.io/guides/3.0/user-guide/), [`Podman`](https://podman.io/), [`Shifter`](https://nersc.gitlab.io/development/shifter/how-to-use/) or [`Charliecloud`](https://hpc.github.io/charliecloud/) for full pipeline reproducibility _(please only use [`Conda`](https://conda.io/miniconda.html) as a last resort; see [docs](https://nf-co.re/usage/configuration#basic-configuration-profiles))_ From f07f7b74a77af0072176ba79d26e3c519316363d Mon Sep 17 00:00:00 2001 From: Phil Ewels Date: Tue, 11 May 2021 12:00:52 +0200 Subject: [PATCH 209/210] Update tools.Dockerfile Co-authored-by: Maxime U. Garcia --- tools.Dockerfile | 3 +++ 1 file changed, 3 insertions(+) diff --git a/tools.Dockerfile b/tools.Dockerfile index ceb0da9235..8343e06ca1 100644 --- a/tools.Dockerfile +++ b/tools.Dockerfile @@ -24,6 +24,9 @@ RUN mkdir -p /usr/share/man/man1 \ && apt-get install -y openjdk-11-jre \ && apt-get clean -y && rm -rf /var/lib/apt/lists/* +# Setup ARG for NXF_VER ENV +ARG NXF_VER="" +ENV NXF_VER ${NXF_VER} # Install Nextflow RUN curl -s https://get.nextflow.io | bash \ && mv nextflow /usr/local/bin \ From d731d3dee1752d3b19a93801e5171733cfa8bc68 Mon Sep 17 00:00:00 2001 From: Phil Ewels Date: Tue, 11 May 2021 12:03:00 +0200 Subject: [PATCH 210/210] Add docs for building docker image with specific NXF_VER --- README.md | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/README.md b/README.md index 8e7b7206bb..ba57848a74 100644 --- a/README.md +++ b/README.md @@ -113,6 +113,14 @@ nf-core list You can use docker image tags to specify the version you would like to use. For example, `nfcore/tools:dev` for the latest development version of the code, or `nfcore/tools:1.14` for version `1.14` of tools. If you omit this, it will default to `:latest`, which should be the latest stable release. +If you need a specific version of Nextflow inside the container, you can build an image yourself. +Clone the repo locally and check out whatever version of nf-core/tools that you need. +Then build using the `--build-arg NXF_VER` flag as follows: + +```bash +docker build -t nfcore/tools:dev . --build-arg NXF_VER=20.04.0 +``` + ### Development version If you would like the latest development version of tools, the command is:
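For readers building the image with a pinned Nextflow version, a quick sanity check is to run the binary from inside the image you just built. The session below is an illustrative sketch, not output from this repository: the `docker build` line repeats the documented command, while the verification step is an assumed use of the standard `nextflow -version` flag (patch 209 shows the installer placing `nextflow` in `/usr/local/bin`, and `--entrypoint` sidesteps whatever default entrypoint the image may define).

```bash
# Build the image, passing the Nextflow version through the ARG added in tools.Dockerfile
docker build -t nfcore/tools:dev . --build-arg NXF_VER=20.04.0

# Illustrative sanity check: print the Nextflow version baked into the image.
# --entrypoint overrides the image's default entrypoint so nextflow runs directly.
docker run --rm --entrypoint nextflow nfcore/tools:dev -version
```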