diff --git a/.github/workflows/create-lint-wf.yml b/.github/workflows/create-lint-wf.yml index 498b086755..cdc354a0e6 100644 --- a/.github/workflows/create-lint-wf.yml +++ b/.github/workflows/create-lint-wf.yml @@ -53,7 +53,7 @@ jobs: run: nf-core --log-file log.txt lint --dir nf-core-testpipeline --fail-ignored --release - name: nf-core modules install - run: nf-core --log-file log.txt modules install fastqc --dir nf-core-testpipeline/ --force --latest + run: nf-core --log-file log.txt modules install fastqc --dir nf-core-testpipeline/ --force - name: Upload log file artifact if: ${{ always() }} diff --git a/.github/workflows/sync.yml b/.github/workflows/sync.yml index 9f3fbdd90f..4e37bc07de 100644 --- a/.github/workflows/sync.yml +++ b/.github/workflows/sync.yml @@ -67,7 +67,7 @@ jobs: --from-branch dev \ --pull-request \ --username nf-core-bot \ - --repository nf-core/${{ matrix.pipeline }} + --github-repository nf-core/${{ matrix.pipeline }} - name: Upload sync log file artifact if: ${{ always() }} diff --git a/CHANGELOG.md b/CHANGELOG.md index 6686a728d1..c49fa0212e 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,32 @@ # nf-core/tools: Changelog +## [v2.1 - Zinc Zebra](https://github.com/nf-core/tools/releases/tag/2.1) - [2021-07-27] + +### Template + +* Correct regex pattern for file names in `nextflow_schema.json` +* Remove `.` from nf-core/tools command examples +* Update Nextflow installation link in pipeline template ([#1201](https://github.com/nf-core/tools/issues/1201)) +* Command `hostname` is not portable [[#1212](https://github.com/nf-core/tools/pull/1212)] +* Changed how singularity and docker links are written in template to avoid duplicate links + +### General + +* Changed names of some flags with `-r` as short options to make the flags more consistent between commands. + +### Modules + +* Added consistency checks between installed modules and `modules.json` ([#1199](https://github.com/nf-core/tools/issues/1199)) +* Added support for excluding modules or specifying module versions in `.nf-core.yml` when updating with `nf-core modules install --all` ([#1204](https://github.com/nf-core/tools/issues/1204)) +* Created `nf-core modules update` and removed updating options from `nf-core modules install` +* Added missing function call to `nf-core lint` ([#1198](https://github.com/nf-core/tools/issues/1198)) +* Fix `nf-core lint` not filtering module tests when run with `--key` ([#1203](https://github.com/nf-core/tools/issues/1203)) +* Fixed `nf-core modules install` not working when installing from branch with `-b` ([#1218](https://github.com/nf-core/tools/issues/1218)) +* Added prompt to choose between updating all modules or a named module in `nf-core modules update` +* Check if a module is installed before trying to update it in `nf-core modules update` +* Verify that a commit SHA provided with `--sha` exists for `install/update` commands +* Add new-line to `main.nf` after `bump-versions` command to make ECLint happy + ## [v2.0.1 - Palladium Platypus Junior](https://github.com/nf-core/tools/releases/tag/2.0.1) - [2021-07-13] ### Template diff --git a/README.md b/README.md index c8859b374b..3a8fbc47ab 100644 --- a/README.md +++ b/README.md @@ -28,7 +28,8 @@ A python package with helper tools for the nf-core community. 
* [`modules list` - List available modules](#list-modules) * [`modules list remote` - List remote modules](#list-remote-modules) * [`modules list local` - List installed modules](#list-installed-modules) - * [`modules install` - Install or update modules in pipeline](#install-or-update-modules-in-a-pipeline) + * [`modules install` - Install modules in a pipeline](#install-modules-in-a-pipeline) + * [`modules update` - Update modules in a pipeline](#update-modules-in-a-pipeline) * [`modules remove` - Remove a module from a pipeline](#remove-a-module-from-a-pipeline) * [`modules create` - Create a module from the template](#create-a-new-module) * [`modules create-test-yml` - Create the `test.yml` file for a module](#create-a-module-test-config-file) @@ -887,7 +888,7 @@ By default, the tool will collect workflow variables from the current branch in You can supply the `--from-branch` flag to specify a different branch. Finally, if you give the `--pull-request` flag, the command will push any changes to the remote and attempt to create a pull request using the GitHub API. -The GitHub username and repository name will be fetched from the remote url (see `git remote -v | grep origin`), or can be supplied with `--username` and `--repository`. +The GitHub username and repository name will be fetched from the remote url (see `git remote -v | grep origin`), or can be supplied with `--username` and `--github-repository`. To create the pull request, a personal access token is required for API authentication. These can be created at [https://github.com/settings/tokens](https://github.com/settings/tokens). @@ -901,6 +902,13 @@ This allows multiple pipelines to use the same code for shared tools and gives a The nf-core DSL2 modules repository is at <https://github.com/nf-core/modules> +The modules supercommand comes with two flags for specifying a custom remote: + +* `--github-repository `: Specify the repository from which the modules should be fetched. Defaults to `nf-core/modules`. +* `--branch `: Specify the branch from which the modules should be fetched. Defaults to `master`. + +Note that a custom remote must follow a similar directory structure to that of `nf-core/modules` for the `nf-core modules` commands to work properly. + ### List modules The `nf-core modules list` command provides the subcommands `remote` and `local` for listing modules installed in a remote repository and in the local pipeline respectively. Both subcommands come with the `--key ` option for filtering the modules by keywords. @@ -962,7 +970,7 @@ INFO Modules installed in '.': └─────────────┴─────────────────┴─────────────┴────────────────────────────────────────────────────────┴────────────┘ ``` -### Install or update modules in a pipeline +### Install modules in a pipeline You can install modules from [nf-core/modules](https://github.com/nf-core/modules) in your pipeline using `nf-core modules install`. A module installed this way will be installed to the `./modules/nf-core/modules` directory. @@ -978,20 +986,77 @@ $ nf-core modules install nf-core/tools version 2.0 ? Tool name: cat/fastq -? Select 'cat/fastq' version: Rename software/ directory to modules/ to re-organise module structure ...truncated... INFO Installing cat/fastq INFO Downloaded 3 files to ./modules/nf-core/modules/cat/fastq ``` -You can pass the module name as an optional argument to `nf-core modules install` instead of using the cli prompt, eg: `nf-core modules install fastqc`. 
+You can pass the module name as an optional argument to `nf-core modules install` instead of using the cli prompt, e.g. `nf-core modules install fastqc`. You can specify a pipeline directory other than the current working directory by using the `--dir `. -There are five flags that you can use with this command: +There are three additional flags that you can use when installing a module: -* `--dir `: Specify a pipeline directory other than the current working directory. -* `--latest`: Install the latest version of the module instead of specifying the version using the cli prompt. * `--force`: Overwrite a previously installed version of the module. +* `--prompt`: Select the module version using a cli prompt. +* `--sha `: Install the module at a specific commit from the `nf-core/modules` repository. + +### Update modules in a pipeline + +You can update modules installed from a remote repository in your pipeline using `nf-core modules update`. + +```console +$ nf-core modules update + ,--./,-. + ___ __ __ __ ___ /,-._.--~\ + |\ | |__ __ / ` / \ |__) |__ } { + | \| | \__, \__/ | \ |___ \`-._,-`-, + `._,._,' + + nf-core/tools version 2.0 + +? Tool name: fastqc +INFO Updating 'nf-core/modules/fastqc' +INFO Downloaded 3 files to ./modules/nf-core/modules/fastqc +``` + +You can pass the module name as an optional argument to `nf-core modules update` instead of using the cli prompt, e.g. `nf-core modules update fastqc`. You can specify a pipeline directory other than the current working directory by using the `--dir `. + +There are four additional flags that you can use with this command: + +* `--force`: Reinstall the module even if it appears to be up to date. +* `--prompt`: Select the module version using a cli prompt. * `--sha `: Install the module at a specific commit from the `nf-core/modules` repository. -* `--all`: Use this flag to change versions on all installed modules. Has the same effect as running `nf-core modules install --force --latest` on all installed modules. To change all modules to a specific version you can run `nf-core modules install --all --sha `. +* `--all`: Use this flag to run the command on all modules in the pipeline. + +If you don't want to update certain modules or want to update them to specific versions, you can make use of the `.nf-core.yml` configuration file. For example, you can prevent the `star/align` module installed from `nf-core/modules` from being updated by adding the following to the `.nf-core.yml` file: + +```yaml +update: + nf-core/modules: + star/align: False +``` + +If you want this module to be updated only to a specific version (or downgraded), you could instead specify the version: + +```yaml +update: + nf-core/modules: + star/align: "e937c7950af70930d1f34bb961403d9d2aa81c7" +``` + +This also works at the repository level. For example, if you want to exclude all modules installed from `nf-core/modules` from being updated you could add: + +```yaml +update: + nf-core/modules: False +``` + +or if you want all modules in `nf-core/modules` at a specific version: + +```yaml +update: + nf-core/modules: "e937c7950af70930d1f34bb961403d9d2aa81c7" +``` + +Note that the module versions specified in the `.nf-core.yml` file have higher precedence than versions specified with the command line flags, thus aiding you in writing reproducible pipelines. 
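As an editor's illustration of the precedence rules described above, the minimal sketch below shows one way an `update` section of `.nf-core.yml` could be resolved for a single module. The helper name `resolve_update_sha` and its return convention are hypothetical and are not part of nf-core/tools: `False` is treated as "skip the module", a string as a pinned commit SHA, and anything else falls back to a `--sha` given on the command line or to the latest commit.

```python
# Hypothetical helper (not nf-core/tools code) illustrating the '.nf-core.yml' update
# semantics: False skips the module, a string pins it to a commit SHA, and a configured
# value takes precedence over a '--sha' passed on the command line.
def resolve_update_sha(update_config, repo_name, module_name, cli_sha=None):
    """Return a commit SHA, the string 'latest', or None if the module should be skipped."""
    entry = update_config.get(repo_name, True)   # repo-level value; default is "update"
    if isinstance(entry, dict):                  # per-module settings for this repository
        entry = entry.get(module_name, True)
    if entry is False:                           # module (or whole repo) excluded from updates
        return None
    if isinstance(entry, str):                   # pinned to a specific commit SHA
        return entry                             # config wins over any '--sha' from the CLI
    return cli_sha if cli_sha else "latest"      # otherwise honour '--sha', or use the newest commit


config = {"nf-core/modules": {"star/align": "e937c7950af70930d1f34bb961403d9d2aa81c7"}}
print(resolve_update_sha(config, "nf-core/modules", "star/align", cli_sha="1a2b3c"))
# prints the pinned SHA from '.nf-core.yml', not the value supplied on the command line
```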
### Remove a module from a pipeline diff --git a/docs/api/_src/_static/js/custom.js b/docs/api/_src/_static/js/custom.js new file mode 100644 index 0000000000..e69de29bb2 diff --git a/nf_core/__main__.py b/nf_core/__main__.py index 096dcfea9f..483d709780 100755 --- a/nf_core/__main__.py +++ b/nf_core/__main__.py @@ -209,7 +209,7 @@ def launch(pipeline, id, revision, command_only, params_in, params_out, save_all @nf_core_cli.command(help_priority=3) @click.argument("pipeline", required=False, metavar="") -@click.option("-r", "--release", type=str, help="Pipeline release") +@click.option("-r", "--revision", type=str, help="Pipeline release") @click.option("-o", "--outdir", type=str, help="Output directory") @click.option( "-x", "--compress", type=click.Choice(["tar.gz", "tar.bz2", "zip", "none"]), help="Archive compression type" @@ -223,7 +223,7 @@ def launch(pipeline, id, revision, command_only, params_in, params_out, save_all help="Don't / do copy images to the output directory and set 'singularity.cacheDir' in workflow", ) @click.option("-p", "--parallel-downloads", type=int, default=4, help="Number of parallel image downloads") -def download(pipeline, release, outdir, compress, force, container, singularity_cache_only, parallel_downloads): +def download(pipeline, revision, outdir, compress, force, container, singularity_cache_only, parallel_downloads): """ Download a pipeline, nf-core/configs and pipeline singularity images. @@ -231,7 +231,7 @@ def download(pipeline, release, outdir, compress, force, container, singularity_ workflow to use relative paths to the configs and singularity images. """ dl = nf_core.download.DownloadWorkflow( - pipeline, release, outdir, compress, force, container, singularity_cache_only, parallel_downloads + pipeline, revision, outdir, compress, force, container, singularity_cache_only, parallel_downloads ) dl.download_workflow() @@ -349,15 +349,15 @@ def lint(dir, release, fix, key, show_passed, fail_ignored, markdown, json): ## nf-core module subcommands @nf_core_cli.group(cls=CustomHelpOrder, help_priority=7) @click.option( - "-r", - "--repository", + "-g", + "--github-repository", type=str, default="nf-core/modules", help="GitHub repository hosting modules.", ) @click.option("-b", "--branch", type=str, default="master", help="Branch of GitHub repository hosting modules.") @click.pass_context -def modules(ctx, repository, branch): +def modules(ctx, github_repository, branch): """ Tools to manage Nextflow DSL2 modules as hosted on nf-core/modules. """ @@ -367,7 +367,7 @@ def modules(ctx, repository, branch): # Make repository object to pass to subcommands try: - ctx.obj["modules_repo_obj"] = nf_core.modules.ModulesRepo(repository, branch) + ctx.obj["modules_repo_obj"] = nf_core.modules.ModulesRepo(github_repository, branch) except LookupError as e: log.critical(e) sys.exit(1) @@ -421,18 +421,17 @@ def local(ctx, keywords, json, dir): @click.pass_context @click.argument("tool", type=str, required=False, metavar=" or ") @click.option("-d", "--dir", type=click.Path(exists=True), default=".", help="Pipeline directory. 
Defaults to CWD") -@click.option("-l", "--latest", is_flag=True, default=False, help="Install the latest version of the module") -@click.option("-f", "--force", is_flag=True, default=False, help="Force installation of module if it already exists") +@click.option("-p", "--prompt", is_flag=True, default=False, help="Prompt for the version of the module") +@click.option("-f", "--force", is_flag=True, default=False, help="Force reinstallation of module if it already exists") @click.option("-s", "--sha", type=str, metavar="", help="Install module at commit SHA") -@click.option("-a", "--all", is_flag=True, default=False, help="Update all modules installed in pipeline") -def install(ctx, tool, dir, latest, force, sha, all): +def install(ctx, tool, dir, prompt, force, sha): """ - Install/update DSL2 modules within a pipeline. + Install DSL2 modules within a pipeline. Fetches and installs module files from a remote repo e.g. nf-core/modules. """ try: - module_install = nf_core.modules.ModuleInstall(dir, force=force, latest=latest, sha=sha, update_all=all) + module_install = nf_core.modules.ModuleInstall(dir, force=force, prompt=prompt, sha=sha) module_install.modules_repo = ctx.obj["modules_repo_obj"] exit_status = module_install.install(tool) if not exit_status and all: @@ -446,6 +445,31 @@ def install(ctx, tool, dir, latest, force, sha, all): @click.pass_context @click.argument("tool", type=str, required=False, metavar=" or ") @click.option("-d", "--dir", type=click.Path(exists=True), default=".", help="Pipeline directory. Defaults to CWD") +@click.option("-f", "--force", is_flag=True, default=False, help="Force update of module") +@click.option("-p", "--prompt", is_flag=True, default=False, help="Prompt for the version of the module") +@click.option("-s", "--sha", type=str, metavar="", help="Install module at commit SHA") +@click.option("-a", "--all", is_flag=True, default=False, help="Update all modules installed in pipeline") +def update(ctx, tool, dir, force, prompt, sha, all): + """ + Update DSL2 modules within a pipeline. + + Fetches and updates module files from a remote repo e.g. nf-core/modules. + """ + try: + module_install = nf_core.modules.ModuleUpdate(dir, force=force, prompt=prompt, sha=sha, update_all=all) + module_install.modules_repo = ctx.obj["modules_repo_obj"] + exit_status = module_install.update(tool) + if not exit_status and all: + sys.exit(1) + except UserWarning as e: + log.error(e) + sys.exit(1) + + +@modules.command(help_priority=4) +@click.pass_context +@click.argument("tool", type=str, required=False, metavar=" or ") +@click.option("-d", "--dir", type=click.Path(exists=True), default=".", help="Pipeline directory. Defaults to CWD") def remove(ctx, dir, tool): """ Remove a module from a pipeline. 
@@ -459,11 +483,11 @@ def remove(ctx, dir, tool): sys.exit(1) -@modules.command("create", help_priority=4) +@modules.command("create", help_priority=5) @click.pass_context @click.argument("tool", type=str, required=False, metavar=" or ") @click.option("-d", "--dir", type=click.Path(exists=True), default=".", metavar="") -@click.option("-a", "--author", type=str, metavar="", help="Module author's GitHub username") +@click.option("-a", "--author", type=str, metavar="", help="Module author's GitHub username prefixed with '@'") @click.option("-l", "--label", type=str, metavar="", help="Standard resource label for process") @click.option("-m", "--meta", is_flag=True, default=False, help="Use Groovy meta map for sample information") @click.option("-n", "--no-meta", is_flag=True, default=False, help="Don't use meta map for sample information") @@ -497,10 +521,10 @@ def create_module(ctx, tool, dir, author, label, meta, no_meta, force, conda_nam sys.exit(1) -@modules.command("create-test-yml", help_priority=5) +@modules.command("create-test-yml", help_priority=6) @click.pass_context @click.argument("tool", type=str, required=False, metavar=" or ") -@click.option("-r", "--run-tests", is_flag=True, default=False, help="Run the test workflows") +@click.option("-t", "--run-tests", is_flag=True, default=False, help="Run the test workflows") @click.option("-o", "--output", type=str, help="Path for output YAML file") @click.option("-f", "--force", is_flag=True, default=False, help="Overwrite output YAML file if it already exists") @click.option("-p", "--no-prompts", is_flag=True, default=False, help="Use defaults without prompting") @@ -519,7 +543,7 @@ def create_test_yml(ctx, tool, run_tests, output, force, no_prompts): sys.exit(1) -@modules.command(help_priority=6) +@modules.command(help_priority=7) @click.pass_context @click.argument("tool", type=str, required=False, metavar=" or ") @click.option("-d", "--dir", type=click.Path(exists=True), default=".", metavar="") @@ -551,7 +575,7 @@ def lint(ctx, tool, dir, key, all, local, passed): sys.exit(1) -@modules.command(help_priority=7) +@modules.command(help_priority=8) @click.pass_context @click.argument("tool", type=str, required=False, metavar=" or ") @click.option("-d", "--dir", type=click.Path(exists=True), default=".", metavar="") @@ -710,9 +734,9 @@ def bump_version(new_version, dir, nextflow): @click.option("-d", "--dir", type=click.Path(exists=True), default=".", help="Pipeline directory. Defaults to CWD") @click.option("-b", "--from-branch", type=str, help="The git branch to use to fetch workflow vars.") @click.option("-p", "--pull-request", is_flag=True, default=False, help="Make a GitHub pull-request with the changes.") -@click.option("-r", "--repository", type=str, help="GitHub PR: target repository.") +@click.option("-g", "--github-repository", type=str, help="GitHub PR: target repository.") @click.option("-u", "--username", type=str, help="GitHub PR: auth username.") -def sync(dir, from_branch, pull_request, repository, username): +def sync(dir, from_branch, pull_request, github_repository, username): """ Sync a pipeline TEMPLATE branch with the nf-core template. 
@@ -732,7 +756,7 @@ def sync(dir, from_branch, pull_request, repository, username): raise # Sync the given pipeline dir - sync_obj = nf_core.sync.PipelineSync(dir, from_branch, pull_request, repository, username) + sync_obj = nf_core.sync.PipelineSync(dir, from_branch, pull_request, github_repository, username) try: sync_obj.sync() except (nf_core.sync.SyncException, nf_core.sync.PullRequestException) as e: diff --git a/nf_core/bump_version.py b/nf_core/bump_version.py index 1563c3ec9b..29f3a39f90 100644 --- a/nf_core/bump_version.py +++ b/nf_core/bump_version.py @@ -97,11 +97,14 @@ def bump_nextflow_version(pipeline_obj, new_version): r"nextflow%20DSL2-%E2%89%A5{}-23aa62.svg".format(current_version.replace(".", r"\.")), "nextflow%20DSL2-%E2%89%A5{}-23aa62.svg".format(new_version), ), + ( + # Replace links to 'nf-co.re' installation page with links to Nextflow installation page + r"https://nf-co.re/usage/installation", + "https://www.nextflow.io/docs/latest/getstarted.html#installation", + ), ( # example: 1. Install [`Nextflow`](https://www.nextflow.io/docs/latest/getstarted.html#installation) (`>=20.04.0`) - r"1\.\s*Install\s*\[`Nextflow`\]\(https://www.nextflow.io/docs/latest/getstarted.html#installation\)\s*\(`>={}`\)".format( - current_version.replace(".", r"\.") - ), + r"1\.\s*Install\s*\[`Nextflow`\]\(y\)\s*\(`>={}`\)".format(current_version.replace(".", r"\.")), "1. Install [`Nextflow`](https://www.nextflow.io/docs/latest/getstarted.html#installation) (`>={}`)".format( new_version ), diff --git a/nf_core/download.py b/nf_core/download.py index a16f494767..bb83564953 100644 --- a/nf_core/download.py +++ b/nf_core/download.py @@ -71,7 +71,7 @@ class DownloadWorkflow(object): Args: pipeline (str): A nf-core pipeline name. - release (str): The workflow release version to download, like `1.0`. Defaults to None. + revision (str): The workflow revision to download, like `1.0`. Defaults to None. singularity (bool): Flag, if the Singularity container should be downloaded as well. Defaults to False. outdir (str): Path to the local download directory. Defaults to None. 
""" @@ -79,7 +79,7 @@ class DownloadWorkflow(object): def __init__( self, pipeline=None, - release=None, + revision=None, outdir=None, compress_type=None, force=False, @@ -88,7 +88,7 @@ def __init__( parallel_downloads=4, ): self.pipeline = pipeline - self.release = release + self.revision = revision self.outdir = outdir self.output_filename = None self.compress_type = compress_type @@ -97,7 +97,7 @@ def __init__( self.singularity_cache_only = singularity_cache_only self.parallel_downloads = parallel_downloads - self.wf_releases = {} + self.wf_revisions = {} self.wf_branches = {} self.wf_sha = None self.wf_download_url = None @@ -114,11 +114,11 @@ def download_workflow(self): # Get workflow details try: self.prompt_pipeline_name() - self.pipeline, self.wf_releases, self.wf_branches = nf_core.utils.get_repo_releases_branches( + self.pipeline, self.wf_revisions, self.wf_branches = nf_core.utils.get_repo_releases_branches( self.pipeline, self.wfs ) - self.prompt_release() - self.get_release_hash() + self.prompt_revision() + self.get_revision_hash() self.prompt_container_download() self.prompt_use_singularity_cachedir() self.prompt_singularity_cachedir_only() @@ -127,7 +127,7 @@ def download_workflow(self): log.critical(e) sys.exit(1) - summary_log = [f"Pipeline release: '{self.release}'", f"Pull containers: '{self.container}'"] + summary_log = [f"Pipeline revision: '{self.revision}'", f"Pull containers: '{self.container}'"] if self.container == "singularity" and os.environ.get("NXF_SINGULARITY_CACHEDIR") is not None: summary_log.append( "Using [blue]$NXF_SINGULARITY_CACHEDIR[/]': {}".format(os.environ["NXF_SINGULARITY_CACHEDIR"]) @@ -194,41 +194,41 @@ def prompt_pipeline_name(self): stderr.print("Specify the name of a nf-core pipeline or a GitHub repository name (user/repo).") self.pipeline = nf_core.utils.prompt_remote_pipeline_name(self.wfs) - def prompt_release(self): - """Prompt for pipeline release / branch""" - # Prompt user for release tag if '--release' was not set - if self.release is None: - self.release = nf_core.utils.prompt_pipeline_release_branch(self.wf_releases, self.wf_branches) + def prompt_revision(self): + """Prompt for pipeline revision / branch""" + # Prompt user for revision tag if '--revision' was not set + if self.revision is None: + self.revision = nf_core.utils.prompt_pipeline_release_branch(self.wf_revisions, self.wf_branches) - def get_release_hash(self): - """Find specified release / branch hash""" + def get_revision_hash(self): + """Find specified revision / branch hash""" # Branch - if self.release in self.wf_branches.keys(): - self.wf_sha = self.wf_branches[self.release] + if self.revision in self.wf_branches.keys(): + self.wf_sha = self.wf_branches[self.revision] - # Release + # Revision else: - for r in self.wf_releases: - if r["tag_name"] == self.release: + for r in self.wf_revisions: + if r["tag_name"] == self.revision: self.wf_sha = r["tag_sha"] break - # Can't find the release or branch - throw an error + # Can't find the revisions or branch - throw an error else: log.info( - "Available {} releases: '{}'".format( - self.pipeline, "', '".join([r["tag_name"] for r in self.wf_releases]) + "Available {} revisions: '{}'".format( + self.pipeline, "', '".join([r["tag_name"] for r in self.wf_revisions]) ) ) log.info("Available {} branches: '{}'".format(self.pipeline, "', '".join(self.wf_branches.keys()))) raise AssertionError( - "Not able to find release / branch '{}' for {}".format(self.release, self.pipeline) + "Not able to find revision / branch '{}' for 
{}".format(self.revision, self.pipeline) ) # Set the outdir if not self.outdir: - self.outdir = "{}-{}".format(self.pipeline.replace("/", "-").lower(), self.release) + self.outdir = "{}-{}".format(self.pipeline.replace("/", "-").lower(), self.revision) # Set the download URL and return self.wf_download_url = "https://github.com/{}/archive/{}.zip".format(self.pipeline, self.wf_sha) diff --git a/nf_core/lint/__init__.py b/nf_core/lint/__init__.py index 53ecaaffa9..b61765b162 100644 --- a/nf_core/lint/__init__.py +++ b/nf_core/lint/__init__.py @@ -42,8 +42,23 @@ def run_linting( An object of type :class:`PipelineLint` that contains all the linting results. """ + # Verify that the requested tests exist + if key: + all_tests = set(PipelineLint._get_all_lint_tests(release_mode)).union(set(ModuleLint._get_all_lint_tests())) + bad_keys = [k for k in key if k not in all_tests] + if len(bad_keys) > 0: + raise AssertionError( + "Test name{} not recognised: '{}'".format( + "s" if len(bad_keys) > 1 else "", + "', '".join(bad_keys), + ) + ) + log.info("Only running tests: '{}'".format("', '".join(key))) + # Create the lint object - lint_obj = PipelineLint(pipeline_dir, release_mode, fix, key, fail_ignored) + pipeline_keys = list(set(key).intersection(set(PipelineLint._get_all_lint_tests(release_mode)))) if key else [] + + lint_obj = PipelineLint(pipeline_dir, release_mode, fix, pipeline_keys, fail_ignored) # Load the various pipeline configs lint_obj._load_lint_config() @@ -53,8 +68,19 @@ def run_linting( # Create the modules lint object module_lint_obj = ModuleLint(pipeline_dir) + # Verify that the pipeline is correctly configured + try: + module_lint_obj.has_valid_directory() + except UserWarning: + raise + # Run only the tests we want - module_lint_tests = ("module_changes", "module_version") + if key: + # Select only the module lint tests + module_lint_tests = list(set(key).intersection(set(ModuleLint._get_all_lint_tests()))) + else: + # If no key is supplied, run the default modules tests + module_lint_tests = ("module_changes", "module_version") module_lint_obj.filter_tests_by_key(module_lint_tests) # Set up files for modules linting test @@ -154,7 +180,14 @@ def __init__(self, wf_path, release_mode=False, fix=(), key=(), fail_ignored=Fal self.passed = [] self.warned = [] self.could_fix = [] - self.lint_tests = [ + self.lint_tests = self._get_all_lint_tests(self.release_mode) + self.fix = fix + self.key = key + self.progress_bar = None + + @staticmethod + def _get_all_lint_tests(release_mode): + return [ "files_exist", "nextflow_config", "files_unchanged", @@ -171,12 +204,7 @@ def __init__(self, wf_path, release_mode=False, fix=(), key=(), fail_ignored=Fal "actions_schema_validation", "merge_markers", "modules_json", - ] - if self.release_mode: - self.lint_tests.extend(["version_consistency"]) - self.fix = fix - self.key = key - self.progress_bar = None + ] + (["version_consistency"] if release_mode else []) def _load(self): """Load information about the pipeline into the PipelineLint object""" @@ -234,7 +262,6 @@ def _lint_pipeline(self): # If -k supplied, only run these tests if self.key: - log.info("Only running tests: '{}'".format("', '".join(self.key))) self.lint_tests = [k for k in self.lint_tests if k in self.key] # Check that the pipeline_dir is a clean git repo diff --git a/nf_core/module-template/modules/main.nf b/nf_core/module-template/modules/main.nf index 2dfc4eef82..6e4dcde636 100644 --- a/nf_core/module-template/modules/main.nf +++ b/nf_core/module-template/modules/main.nf @@ -15,7 
+15,8 @@ include { initOptions; saveFiles; getSoftwareName } from './functions' // unless there is a run-time, storage advantage in implementing in this way // e.g. it's ok to have a single module for bwa to output BAM instead of SAM: // bwa mem | samtools view -B -T ref.fasta -// TODO nf-core: Optional inputs are not currently supported by Nextflow. However, "fake files" MAY be used to work around this issue. +// TODO nf-core: Optional inputs are not currently supported by Nextflow. However, using an empty +// list (`[]`) instead of a file can be used to work around this issue. params.options = [:] options = initOptions(params.options) @@ -33,9 +34,9 @@ process {{ tool_name_underscore|upper }} { // TODO nf-core: See section in main README for further information regarding finding and adding container addresses to the section below. conda (params.enable_conda ? "{{ bioconda if bioconda else 'YOUR-TOOL-HERE' }}" : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/{{ singularity_container if singularity_container else 'YOUR-TOOL-HERE' }}" + container "{{ singularity_container if singularity_container else 'https://depot.galaxyproject.org/singularity/YOUR-TOOL-HERE' }}" } else { - container "quay.io/biocontainers/{{ docker_container if docker_container else 'YOUR-TOOL-HERE' }}" + container "{{ docker_container if docker_container else 'quay.io/biocontainers/YOUR-TOOL-HERE' }}" } input: diff --git a/nf_core/modules/__init__.py b/nf_core/modules/__init__.py index 82b3cf76ba..dbce4bd915 100644 --- a/nf_core/modules/__init__.py +++ b/nf_core/modules/__init__.py @@ -6,4 +6,5 @@ from .module_utils import ModuleException from .list import ModuleList from .install import ModuleInstall +from .update import ModuleUpdate from .remove import ModuleRemove diff --git a/nf_core/modules/bump_versions.py b/nf_core/modules/bump_versions.py index 63b98068f1..9189cbb1f6 100644 --- a/nf_core/modules/bump_versions.py +++ b/nf_core/modules/bump_versions.py @@ -198,7 +198,7 @@ def bump_module_version(self, module: NFCoreModule): newcontent.append(line) if found_match: - content = "\n".join(newcontent) + content = "\n".join(newcontent) + "\n" else: self.failed.append( (f"Did not find pattern {pattern[0]} in module {module.module_name}", module.module_name) diff --git a/nf_core/modules/install.py b/nf_core/modules/install.py index eca4a19db2..b0d96c4311 100644 --- a/nf_core/modules/install.py +++ b/nf_core/modules/install.py @@ -3,6 +3,7 @@ import logging import nf_core.utils +import nf_core.modules.module_utils from .modules_command import ModuleCommand from .module_utils import get_installed_modules, get_module_git_log, module_exist_in_repo @@ -12,10 +13,10 @@ class ModuleInstall(ModuleCommand): - def __init__(self, pipeline_dir, force=False, latest=False, sha=None, update_all=False): + def __init__(self, pipeline_dir, force=False, prompt=False, sha=None, update_all=False): super().__init__(pipeline_dir) self.force = force - self.latest = latest + self.prompt = prompt self.sha = sha self.update_all = update_all @@ -24,218 +25,112 @@ def install(self, module): log.error("You cannot install a module in a clone of nf-core/modules") return False # Check whether pipelines is valid - self.has_valid_directory() - if not self.update_all: - # Get the available modules + if not self.has_valid_directory(): + return False + + # Verify that 'modules.json' is consistent with the installed modules + 
self.modules_json_up_to_date() + + # Get the available modules + try: + self.modules_repo.get_modules_file_tree() + except LookupError as e: + log.error(e) + return False + + if self.prompt and self.sha is not None: + log.error("Cannot use '--sha' and '--prompt' at the same time!") + return False + + # Verify that the provided SHA exists in the repo + if self.sha: try: - self.modules_repo.get_modules_file_tree() + nf_core.modules.module_utils.sha_exists(self.sha, self.modules_repo) + except UserWarning: + log.error(f"Commit SHA '{self.sha}' doesn't exist in '{self.modules_repo.name}'") + return False except LookupError as e: log.error(e) return False - if self.latest and self.sha is not None: - log.error("Cannot use '--sha' and '--latest' at the same time!") - return False + if module is None: + module = questionary.autocomplete( + "Tool name:", + choices=self.modules_repo.modules_avail_module_names, + style=nf_core.utils.nfcore_question_style, + ).unsafe_ask() - if module is None: - module = questionary.autocomplete( - "Tool name:", - choices=self.modules_repo.modules_avail_module_names, - style=nf_core.utils.nfcore_question_style, - ).unsafe_ask() - - # Check that the supplied name is an available module - if module and module not in self.modules_repo.modules_avail_module_names: - log.error("Module '{}' not found in list of available modules.".format(module)) - log.info("Use the command 'nf-core modules list' to view available software") - return False - repos_and_modules = [(self.modules_repo, module)] - else: - if module: - raise UserWarning("You cannot specify a module and use the '--all' flag at the same time") - self.force = True - - self.get_pipeline_modules() - repos_and_modules = [ - (ModulesRepo(repo=repo_name), modules) for repo_name, modules in self.module_names.items() - ] - # Load the modules file trees - for repo, _ in repos_and_modules: - repo.get_modules_file_tree() - repos_and_modules = [(repo, module) for repo, modules in repos_and_modules for module in modules] + # Check that the supplied name is an available module + if module and module not in self.modules_repo.modules_avail_module_names: + log.error("Module '{}' not found in list of available modules.".format(module)) + log.info("Use the command 'nf-core modules list' to view available software") + return False # Load 'modules.json' modules_json = self.load_modules_json() if not modules_json: return False - exit_value = True - for modules_repo, module in repos_and_modules: - if not module_exist_in_repo(module, modules_repo): - warn_msg = f"Module '{module}' not found in remote '{modules_repo.name}' ({modules_repo.branch})" - if self.update_all: - warn_msg += ". Skipping..." 
- log.warning(warn_msg) - exit_value = False - continue - - if modules_repo.name in modules_json["repos"]: - current_entry = modules_json["repos"][modules_repo.name].get(module) - else: - current_entry = None - - # Set the install folder based on the repository name - install_folder = [modules_repo.owner, modules_repo.repo] - - # Compute the module directory - module_dir = os.path.join(self.dir, "modules", *install_folder, module) - - if current_entry is not None and self.sha is None: - # Fetch the latest commit for the module - current_version = current_entry["git_sha"] - try: - git_log = get_module_git_log(module, modules_repo=modules_repo, per_page=1, page_nbr=1) - except LookupError as e: - log.error(e) - exit_value = False - continue - except UserWarning: - log.error(f"Was unable to fetch version of '{modules_repo.name}/{module}'") - exit_value = False - continue - latest_version = git_log[0]["git_sha"] - if current_version == latest_version and (not self.force or self.latest or self.update_all): - log.info(f"'{modules_repo.name}/{module}' is already up to date") - continue - elif not self.force: - log.error("Found newer version of module.") - self.latest = self.force = questionary.confirm( - "Do you want to install it? (--force --latest)", default=False - ).unsafe_ask() - if not self.latest: - exit_value = False - continue - else: - latest_version = None - - # Check that we don't already have a folder for this module - if not self.check_module_files_installed(module, module_dir): - exit_value = False - continue - - if self.sha: - if current_entry is not None: - if self.force: - if current_entry["git_sha"] == self.sha: - log.info(f"Module {modules_repo.name}/{module} already installed at {self.sha}") - continue - else: - exit_value = False - continue - - if self.force: - log.info(f"Removing old version of module '{module}'") - self.clear_module_dir(module, module_dir) - - if self.download_module_file(module, self.sha, modules_repo, install_folder, module_dir): - self.update_modules_json(modules_json, modules_repo.name, module, self.sha) - else: - exit_value = False - continue - else: - if self.latest or self.update_all: - # Fetch the latest commit for the module - if latest_version is None: - try: - git_log = get_module_git_log(module, modules_repo=modules_repo, per_page=1, page_nbr=1) - except UserWarning: - log.error(f"Was unable to fetch version of module '{module}'") - exit_value = False - continue - latest_version = git_log[0]["git_sha"] - version = latest_version - else: - try: - version = self.prompt_module_version_sha( - module, - installed_sha=current_entry["git_sha"] if not current_entry is None else None, - modules_repo=modules_repo, - ) - except SystemError as e: - log.error(e) - exit_value = False - continue - log.info(f"Installing {module}") - log.debug( - f"Installing module '{module}' at modules hash {modules_repo.modules_current_hash} from {self.modules_repo.name}" - ) + if not module_exist_in_repo(module, self.modules_repo): + warn_msg = f"Module '{module}' not found in remote '{self.modules_repo.name}' ({self.modules_repo.branch})" + log.warning(warn_msg) + return False - if self.force: - log.info(f"Removing old version of module '{module}'") - self.clear_module_dir(module, module_dir) - - # Download module files - if not self.download_module_file(module, version, modules_repo, install_folder, module_dir): - exit_value = False - continue - - # Update module.json with newly installed module - self.update_modules_json(modules_json, modules_repo.name, module, version) 
- return exit_value - - def check_module_files_installed(self, module_name, module_dir): - """Checks if a module is already installed""" - if os.path.exists(module_dir): - if not self.force: - log.error(f"Module directory '{module_dir}' already exists.") - self.force = questionary.confirm( - "Do you want to overwrite local files? (--force)", default=False - ).unsafe_ask() - return self.force + if self.modules_repo.name in modules_json["repos"]: + current_entry = modules_json["repos"][self.modules_repo.name].get(module) else: - return True - - def prompt_module_version_sha(self, module, installed_sha=None, modules_repo=None): - if modules_repo is None: - modules_repo = self.modules_repo - older_commits_choice = questionary.Choice( - title=[("fg:ansiyellow", "older commits"), ("class:choice-default", "")], value="" - ) - git_sha = "" - page_nbr = 1 - try: - next_page_commits = get_module_git_log(module, modules_repo=modules_repo, per_page=10, page_nbr=page_nbr) - except UserWarning: - next_page_commits = None - except LookupError as e: - log.warning(e) - next_page_commits = None + current_entry = None - while git_sha is "": - commits = next_page_commits + # Set the install folder based on the repository name + install_folder = [self.modules_repo.owner, self.modules_repo.repo] + + # Compute the module directory + module_dir = os.path.join(self.dir, "modules", *install_folder, module) + + # Check that the module is not already installed + if (current_entry is not None and os.path.exists(module_dir)) and not self.force: + + log.error(f"Module is already installed.") + repo_flag = "" if self.modules_repo.name == "nf-core/modules" else f"-g {self.modules_repo.name} " + branch_flag = "" if self.modules_repo.branch == "master" else f"-b {self.modules_repo.branch} " + + log.info( + f"To update '{module}' run 'nf-core modules {repo_flag}{branch_flag}update {module}'. 
To force reinstallation use '--force'" + ) + return False + + if self.sha: + version = self.sha + elif self.prompt: try: - next_page_commits = get_module_git_log( - module, modules_repo=modules_repo, per_page=10, page_nbr=page_nbr + 1 + version = nf_core.modules.module_utils.prompt_module_version_sha( + module, + installed_sha=current_entry["git_sha"] if not current_entry is None else None, + modules_repo=self.modules_repo, ) + except SystemError as e: + log.error(e) + return False + else: + # Fetch the latest commit for the module + try: + git_log = get_module_git_log(module, modules_repo=self.modules_repo, per_page=1, page_nbr=1) except UserWarning: - next_page_commits = None - except LookupError as e: - log.warning(e) - next_page_commits = None - - choices = [] - for title, sha in map(lambda commit: (commit["trunc_message"], commit["git_sha"]), commits): - - display_color = "fg:ansiblue" if sha != installed_sha else "fg:ansired" - message = f"{title} {sha}" - if installed_sha == sha: - message += " (installed version)" - commit_display = [(display_color, message), ("class:choice-default", "")] - choices.append(questionary.Choice(title=commit_display, value=sha)) - if next_page_commits is not None: - choices += [older_commits_choice] - git_sha = questionary.select( - f"Select '{module}' version:", choices=choices, style=nf_core.utils.nfcore_question_style - ).unsafe_ask() - page_nbr += 1 - return git_sha + log.error(f"Was unable to fetch version of module '{module}'") + return False + version = git_log[0]["git_sha"] + + if self.force: + log.info(f"Removing installed version of '{self.modules_repo.name}/{module}'") + self.clear_module_dir(module, module_dir) + + log.info(f"{'Rei' if self.force else 'I'}nstalling '{module}'") + log.debug(f"Installing module '{module}' at modules hash {version} from {self.modules_repo.name}") + + # Download module files + if not self.download_module_file(module, version, self.modules_repo, install_folder, module_dir): + return False + + # Update module.json with newly installed module + self.update_modules_json(modules_json, self.modules_repo.name, module, version) + return True diff --git a/nf_core/modules/lint/__init__.py b/nf_core/modules/lint/__init__.py index 433735034f..f9de48a304 100644 --- a/nf_core/modules/lint/__init__.py +++ b/nf_core/modules/lint/__init__.py @@ -79,8 +79,7 @@ def __init__(self, dir): self.warned = [] self.failed = [] self.modules_repo = ModulesRepo() - self.lint_tests = ["main_nf", "functions_nf", "meta_yml", "module_changes", "module_todos"] - + self.lint_tests = self._get_all_lint_tests() # Get lists of modules install in directory self.all_local_modules, self.all_nfcore_modules = self.get_installed_modules() @@ -95,6 +94,10 @@ def __init__(self, dir): # Add as first test to load git_sha before module_changes self.lint_tests.insert(0, "module_version") + @staticmethod + def _get_all_lint_tests(): + return ["main_nf", "functions_nf", "meta_yml", "module_changes", "module_todos"] + def lint(self, module=None, key=(), all_modules=False, print_results=True, show_passed=False, local=False): """ Lint all or one specific module diff --git a/nf_core/modules/lint/main_nf.py b/nf_core/modules/lint/main_nf.py index 979c2fe779..018dc99af2 100644 --- a/nf_core/modules/lint/main_nf.py +++ b/nf_core/modules/lint/main_nf.py @@ -206,7 +206,7 @@ def check_process_section(self, lines): ("bioconda_latest", f"Conda update: {package} `{ver}` -> `{last_ver}`", self.main_nf) ) else: - self.passed.append(("bioconda_latest", "Conda package is the 
latest available: `{bp}`", self.main_nf)) + self.passed.append(("bioconda_latest", f"Conda package is the latest available: `{bp}`", self.main_nf)) if docker_tag == singularity_tag: return True diff --git a/nf_core/modules/list.py b/nf_core/modules/list.py index 0b667a928c..355b952c4c 100644 --- a/nf_core/modules/list.py +++ b/nf_core/modules/list.py @@ -40,10 +40,6 @@ def pattern_msg(keywords): # No pipeline given - show all remote if self.remote: - log.info( - f"Modules available from {self.modules_repo.name} ({self.modules_repo.branch})" - f"{pattern_msg(keywords)}:\n" - ) # Get the list of available modules try: @@ -68,8 +64,6 @@ def pattern_msg(keywords): # We have a pipeline - list what's installed else: - log.info(f"Modules installed in '{self.dir}'{pattern_msg(keywords)}:\n") - # Check whether pipelines is valid try: self.has_valid_directory() @@ -77,6 +71,9 @@ def pattern_msg(keywords): log.error(e) return "" + # Verify that 'modules.json' is consistent with the installed modules + self.modules_json_up_to_date() + # Get installed modules self.get_pipeline_modules() @@ -120,4 +117,12 @@ def pattern_msg(keywords): if print_json: return json.dumps(modules, sort_keys=True, indent=4) + + if self.remote: + log.info( + f"Modules available from {self.modules_repo.name} ({self.modules_repo.branch})" + f"{pattern_msg(keywords)}:\n" + ) + else: + log.info(f"Modules installed in '{self.dir}'{pattern_msg(keywords)}:\n") return table diff --git a/nf_core/modules/module_utils.py b/nf_core/modules/module_utils.py index 9177b646f8..00d32090ef 100644 --- a/nf_core/modules/module_utils.py +++ b/nf_core/modules/module_utils.py @@ -5,6 +5,7 @@ import logging import rich import datetime +import questionary import nf_core.utils @@ -38,9 +39,7 @@ def module_exist_in_repo(module_name, modules_repo): return not (response.status_code == 404) -def get_module_git_log( - module_name, owner="nf-core", modules_repo=None, per_page=30, page_nbr=1, since="2021-07-07T00:00:00Z" -): +def get_module_git_log(module_name, modules_repo=None, per_page=30, page_nbr=1, since="2021-07-07T00:00:00Z"): """ Fetches the commit history the of requested module since a given date. 
The default value is not arbitrary - it is the last time the structure of the nf-core/modules repository was had an @@ -57,8 +56,13 @@ def get_module_git_log( """ if modules_repo is None: modules_repo = ModulesRepo() + api_url = f"https://api.github.com/repos/{modules_repo.name}/commits" + api_url += f"?sha{modules_repo.branch}" + if module_name is not None: + api_url += f"&path=modules/{module_name}" + api_url += f"&page={page_nbr}" + api_url += f"&since={since}" - api_url = f"https://api.github.com/repos/{modules_repo.name}/commits?sha=master&path=modules/{module_name}&per_page={per_page}&page={page_nbr}&since={since}" log.debug(f"Fetching commit history of module '{module_name}' from github API") response = requests.get(api_url, auth=nf_core.utils.github_api_auto_auth()) if response.status_code == 200: @@ -154,7 +158,7 @@ def create_modules_json(pipeline_dir): file_progress = progress_bar.add_task( "Creating 'modules.json' file", total=sum(map(len, repo_module_names.values())), test_name="module.json" ) - for repo_name, module_names in repo_module_names.items(): + for repo_name, module_names in sorted(repo_module_names.items()): try: modules_repo = ModulesRepo(repo=repo_name) except LookupError as e: @@ -162,49 +166,64 @@ def create_modules_json(pipeline_dir): repo_path = os.path.join(modules_dir, repo_name) modules_json["repos"][repo_name] = dict() - for module_name in module_names: + for module_name in sorted(module_names): module_path = os.path.join(repo_path, module_name) progress_bar.update(file_progress, advance=1, test_name=f"{repo_name}/{module_name}") try: - # Find the correct commit SHA for the local files. - # We iterate over the commit log pages until we either - # find a matching commit or we reach the end of the commits - correct_commit_sha = None - commit_page_nbr = 1 - while correct_commit_sha is None: - - commit_shas = [ - commit["git_sha"] - for commit in get_module_git_log( - module_name, modules_repo=modules_repo, page_nbr=commit_page_nbr - ) - ] - correct_commit_sha = find_correct_commit_sha( - module_name, module_path, modules_repo, commit_shas - ) - commit_page_nbr += 1 - - modules_json["repos"][repo_name][module_name] = {"git_sha": correct_commit_sha} - except (UserWarning, LookupError) as e: + correct_commit_sha = find_correct_commit_sha(module_name, module_path, modules_repo) + + except (LookupError, UserWarning) as e: log.warn( f"Could not fetch 'git_sha' for module: '{module_name}'. Please try to install a newer version of this module. ({e})" ) + continue + modules_json["repos"][repo_name][module_name] = {"git_sha": correct_commit_sha} + modules_json_path = os.path.join(pipeline_dir, "modules.json") with open(modules_json_path, "w") as fh: json.dump(modules_json, fh, indent=4) -def find_correct_commit_sha(module_name, module_path, modules_repo, commit_shas): +def find_correct_commit_sha(module_name, module_path, modules_repo): """ Returns the SHA for the latest commit where the local files are identical to the remote files Args: module_name (str): Name of module module_path (str): Path to module in local repo module_repo (str): Remote repo for module - commit_shas ([ str ]): List of commit SHAs for module, sorted in descending order Returns: commit_sha (str): The latest commit SHA where local files are identical to remote files """ + try: + # Find the correct commit SHA for the local files. 
+ # We iterate over the commit log pages until we either + # find a matching commit or we reach the end of the commits + correct_commit_sha = None + commit_page_nbr = 1 + while correct_commit_sha is None: + commit_shas = [ + commit["git_sha"] + for commit in get_module_git_log(module_name, modules_repo=modules_repo, page_nbr=commit_page_nbr) + ] + correct_commit_sha = iterate_commit_log_page(module_name, module_path, modules_repo, commit_shas) + commit_page_nbr += 1 + return correct_commit_sha + except (UserWarning, LookupError) as e: + raise + + +def iterate_commit_log_page(module_name, module_path, modules_repo, commit_shas): + """ + Iterates through a list of commits for a module and checks if the local file contents match the remote + Args: + module_name (str): Name of module + module_path (str): Path to module in local repo + module_repo (str): Remote repo for module + commit_shas ([ str ]): List of commit SHAs for module, sorted in descending order + Returns: + commit_sha (str): The latest commit SHA from 'commit_shas' where local files + are identical to remote files + """ files_to_check = ["main.nf", "functions.nf", "meta.yml"] local_file_contents = [None, None, None] @@ -361,3 +380,58 @@ def verify_pipeline_dir(dir): ) error_msg += "\nThe 'nf-core/software' directory should therefore be renamed to 'nf-core/modules'" raise UserWarning(error_msg) + + +def prompt_module_version_sha(module, modules_repo, installed_sha=None): + older_commits_choice = questionary.Choice( + title=[("fg:ansiyellow", "older commits"), ("class:choice-default", "")], value="" + ) + git_sha = "" + page_nbr = 1 + try: + next_page_commits = get_module_git_log(module, modules_repo=modules_repo, per_page=10, page_nbr=page_nbr) + except UserWarning: + next_page_commits = None + except LookupError as e: + log.warning(e) + next_page_commits = None + + while git_sha is "": + commits = next_page_commits + try: + next_page_commits = get_module_git_log( + module, modules_repo=modules_repo, per_page=10, page_nbr=page_nbr + 1 + ) + except UserWarning: + next_page_commits = None + except LookupError as e: + log.warning(e) + next_page_commits = None + + choices = [] + for title, sha in map(lambda commit: (commit["trunc_message"], commit["git_sha"]), commits): + + display_color = "fg:ansiblue" if sha != installed_sha else "fg:ansired" + message = f"{title} {sha}" + if installed_sha == sha: + message += " (installed version)" + commit_display = [(display_color, message), ("class:choice-default", "")] + choices.append(questionary.Choice(title=commit_display, value=sha)) + if next_page_commits is not None: + choices += [older_commits_choice] + git_sha = questionary.select( + f"Select '{module}' commit:", choices=choices, style=nf_core.utils.nfcore_question_style + ).unsafe_ask() + page_nbr += 1 + return git_sha + + +def sha_exists(sha, modules_repo): + i = 1 + while True: + try: + if sha in {commit["git_sha"] for commit in get_module_git_log(None, modules_repo, page_nbr=i)}: + return True + i += 1 + except (UserWarning, LookupError): + raise diff --git a/nf_core/modules/modules_command.py b/nf_core/modules/modules_command.py index d013022263..e7ba1a5c05 100644 --- a/nf_core/modules/modules_command.py +++ b/nf_core/modules/modules_command.py @@ -3,11 +3,13 @@ import os import glob import shutil +import copy import json import logging import yaml import nf_core.modules.module_utils +import nf_core.utils from nf_core.modules.modules_repo import ModulesRepo log = logging.getLogger(__name__) @@ -66,9 +68,11 @@ def 
get_pipeline_modules(self): repo_path = os.path.join(module_base_path, repo_name) module_mains_path = f"{repo_path}/**/main.nf" module_mains = glob.glob(module_mains_path, recursive=True) - self.module_names[repo_name] = [ - os.path.dirname(os.path.relpath(mod, repo_path)) for mod in module_mains - ] + if len(module_mains) > 0: + self.module_names[repo_name] = [ + os.path.dirname(os.path.relpath(mod, repo_path)) for mod in module_mains + ] + elif self.repo_type == "modules": module_mains_path = f"{module_base_path}/**/main.nf" module_mains = glob.glob(module_mains_path, recursive=True) @@ -106,6 +110,130 @@ def has_modules_file(self): except UserWarning as e: raise + def modules_json_up_to_date(self): + """ + Checks whether the modules installed in the directory + are consistent with the entries in the 'modules.json' file and vice versa. + + If a module has an entry in the 'modules.json' file but is missing in the directory, + we first try to reinstall the module from the remote and if that fails we remove the entry + in 'modules.json'. + + If a module is installed but the entry in 'modules.json' is missing we iterate through + the commit log in the remote to try to determine the SHA. + """ + mod_json = self.load_modules_json() + fresh_mod_json = copy.deepcopy(mod_json) + self.get_pipeline_modules() + missing_from_modules_json = {} + + # Iterate through all installed modules + # and remove all entries in modules_json which + # are present in the directory + for repo, modules in self.module_names.items(): + if repo in mod_json["repos"]: + for module in modules: + if module in mod_json["repos"][repo]: + mod_json["repos"][repo].pop(module) + else: + if repo not in missing_from_modules_json: + missing_from_modules_json[repo] = [] + missing_from_modules_json[repo].append(module) + if len(mod_json["repos"][repo]) == 0: + mod_json["repos"].pop(repo) + else: + missing_from_modules_json[repo] = modules + + # If there are any modules left in 'modules.json' after all installed are removed, + # we try to reinstall them + if len(mod_json["repos"]) > 0: + missing_but_in_mod_json = [ + f"'{repo}/{module}'" for repo, modules in mod_json["repos"].items() for module in modules + ] + log.info( + f"Reinstalling modules found in 'modules.json' but missing from directory: {', '.join(missing_but_in_mod_json)}" + ) + + remove_from_mod_json = {} + for repo, modules in mod_json["repos"].items(): + try: + modules_repo = ModulesRepo(repo=repo) + modules_repo.get_modules_file_tree() + install_folder = [modules_repo.owner, modules_repo.repo] + except LookupError as e: + remove_from_mod_json[repo] = list(modules.keys()) + continue + + for module, entry in modules.items(): + sha = entry.get("git_sha") + if sha is None: + if repo not in remove_from_mod_json: + remove_from_mod_json[repo] = [] + remove_from_mod_json[repo].append(module) + continue + module_dir = os.path.join(self.dir, "modules", *install_folder, module) + self.download_module_file(module, sha, modules_repo, install_folder, module_dir) + + # If the reinstall fails, we remove those entries in 'modules.json' + if sum(map(len, remove_from_mod_json.values())) > 0: + uninstallable_mods = [ + f"'{repo}/{module}'" for repo, modules in remove_from_mod_json.items() for module in modules + ] + if len(uninstallable_mods) == 1: + log.info(f"Was unable to reinstall {uninstallable_mods[0]}. Removing 'modules.json' entry") + else: + log.info( + f"Was unable to reinstall some modules. 
Removing 'modules.json' entries: {', '.join(uninstallable_mods)}" + ) + + for repo, modules in remove_from_mod_json.items(): + for module in modules: + fresh_mod_json["repos"][repo].pop(module) + if len(fresh_mod_json["repos"][repo]) == 0: + fresh_mod_json["repos"].pop(repo) + + # If some modules didn't have an entry in the 'modules.json' file + # we try to determine the SHA from the commit log of the remote + if sum(map(len, missing_from_modules_json.values())) > 0: + + format_missing = [ + f"'{repo}/{module}'" for repo, modules in missing_from_modules_json.items() for module in modules + ] + if len(format_missing) == 1: + log.info(f"Recomputing commit SHA for module {format_missing[0]} which was missing from 'modules.json'") + else: + log.info( + f"Recomputing commit SHAs for modules which were missing from 'modules.json': {', '.join(format_missing)}" + ) + failed_to_find_commit_sha = [] + for repo, modules in missing_from_modules_json.items(): + modules_repo = ModulesRepo(repo=repo) + repo_path = os.path.join(self.dir, "modules", repo) + for module in modules: + module_path = os.path.join(repo_path, module) + try: + correct_commit_sha = nf_core.modules.module_utils.find_correct_commit_sha( + module, module_path, modules_repo + ) + if repo not in fresh_mod_json["repos"]: + fresh_mod_json["repos"][repo] = {} + + fresh_mod_json["repos"][repo][module] = {"git_sha": correct_commit_sha} + except (LookupError, UserWarning) as e: + failed_to_find_commit_sha.append(f"'{repo}/{module}'") + + if len(failed_to_find_commit_sha) > 0: + + def _s(some_list): + return "" if len(some_list) == 1 else "s" + + log.info( + f"Could not determine 'git_sha' for module{_s(failed_to_find_commit_sha)}: '{', '.join(failed_to_find_commit_sha)}'." + f"\nPlease try to install a newer version of {'this' if len(failed_to_find_commit_sha) == 1 else 'these'} module{_s(failed_to_find_commit_sha)}." 
+ ) + + self.dump_modules_json(fresh_mod_json) + def clear_module_dir(self, module_name, module_dir): """Removes all files in the module directory""" try: @@ -134,7 +262,7 @@ def download_module_file(self, module_name, module_version, modules_repo, instal dl_filename = os.path.join(self.dir, "modules", *install_folder, *split_filename[1:]) try: self.modules_repo.download_gh_file(dl_filename, api_url) - except SystemError as e: + except (SystemError, LookupError) as e: log.error(e) return False log.info("Downloaded {} files to {}".format(len(files), module_dir)) @@ -160,6 +288,9 @@ def update_modules_json(self, modules_json, repo_name, module_name, module_versi def dump_modules_json(self, modules_json): modules_json_path = os.path.join(self.dir, "modules.json") + # Sort the 'modules.json' repo entries + modules_json["repos"] = nf_core.utils.sort_dictionary(modules_json["repos"]) + with open(modules_json_path, "w") as fh: json.dump(modules_json, fh, indent=4) diff --git a/nf_core/modules/modules_repo.py b/nf_core/modules/modules_repo.py index 4b6e5c6bb2..d6d2871ecd 100644 --- a/nf_core/modules/modules_repo.py +++ b/nf_core/modules/modules_repo.py @@ -29,7 +29,6 @@ def __init__(self, repo="nf-core/modules", branch="master"): self.owner, self.repo = self.name.split("/") self.modules_file_tree = {} - self.modules_current_hash = None self.modules_avail_module_names = [] def verify_modules_repo(self): @@ -65,7 +64,6 @@ def get_modules_file_tree(self): Fetch the file list from the repo, using the GitHub API Sets self.modules_file_tree - self.modules_current_hash self.modules_avail_module_names """ api_url = "https://api.github.com/repos/{}/git/trees/{}?recursive=1".format(self.name, self.branch) @@ -80,7 +78,6 @@ def get_modules_file_tree(self): result = r.json() assert result["truncated"] == False - self.modules_current_hash = result["sha"] self.modules_file_tree = result["tree"] for f in result["tree"]: if f["path"].startswith(f"modules/") and f["path"].endswith("/main.nf") and "/test/" not in f["path"]: @@ -121,7 +118,7 @@ def get_module_file_urls(self, module, commit=""): results[f["path"]] = f["url"] if commit != "": for path in results: - results[path] = f"https://api.github.com/repos/nf-core/modules/contents/{path}?ref={commit}" + results[path] = f"https://api.github.com/repos/{self.name}/contents/{path}?ref={commit}" return results def download_gh_file(self, dl_filename, api_url): diff --git a/nf_core/modules/remove.py b/nf_core/modules/remove.py index 186d958182..f657c361a3 100644 --- a/nf_core/modules/remove.py +++ b/nf_core/modules/remove.py @@ -61,8 +61,7 @@ def remove(self, module): # Verify that the module is actually installed if not os.path.exists(module_dir): - log.error("Module directory is not installed: {}".format(module_dir)) - log.info("The module you want to remove does not seem to be installed") + log.error(f"Module directory does not exist: '{module_dir}'") modules_json = self.load_modules_json() if self.modules_repo.name in modules_json["repos"] and module in modules_json["repos"][repo_name]: diff --git a/nf_core/modules/update.py b/nf_core/modules/update.py new file mode 100644 index 0000000000..2bddfec763 --- /dev/null +++ b/nf_core/modules/update.py @@ -0,0 +1,241 @@ +import os +import questionary +import logging + +import nf_core.utils +import nf_core.modules.module_utils + +from .modules_command import ModuleCommand +from .module_utils import get_installed_modules, get_module_git_log, module_exist_in_repo +from .modules_repo import ModulesRepo + +log = 
logging.getLogger(__name__) + + +class ModuleUpdate(ModuleCommand): + def __init__(self, pipeline_dir, force=False, prompt=False, sha=None, update_all=False): + super().__init__(pipeline_dir) + self.force = force + self.prompt = prompt + self.sha = sha + self.update_all = update_all + + def update(self, module): + if self.repo_type == "modules": + log.error("You cannot update a module in a clone of nf-core/modules") + return False + # Check whether the pipeline is valid + if not self.has_valid_directory(): + return False + + # Verify that 'modules.json' is consistent with the installed modules + self.modules_json_up_to_date() + + tool_config = nf_core.utils.load_tools_config() + update_config = tool_config.get("update", {}) + if not self.update_all and module is None: + choices = ["All modules", "Named module"] + self.update_all = ( + questionary.select( + "Update all modules or a single named module?", + choices=choices, + style=nf_core.utils.nfcore_question_style, + ).ask() + == "All modules" + ) + + if self.prompt and self.sha is not None: + log.error("Cannot use '--sha' and '--prompt' at the same time!") + return False + + # Verify that the provided SHA exists in the repo + if self.sha: + try: + nf_core.modules.module_utils.sha_exists(self.sha, self.modules_repo) + except UserWarning: + log.error(f"Commit SHA '{self.sha}' doesn't exist in '{self.modules_repo.name}'") + return False + except LookupError as e: + log.error(e) + return False + + if not self.update_all: + # Get the available modules + try: + self.modules_repo.get_modules_file_tree() + except LookupError as e: + log.error(e) + return False + + # Check if there are any modules installed from the repo + repo_name = self.modules_repo.name + if repo_name not in self.module_names: + log.error(f"No modules installed from '{repo_name}'") + return False + + if module is None: + self.get_pipeline_modules() + module = questionary.autocomplete( + "Tool name:", + choices=self.module_names[repo_name], + style=nf_core.utils.nfcore_question_style, + ).unsafe_ask() + + # Check if module is installed before trying to update + if module not in self.module_names[repo_name]: + log.error(f"Module '{module}' is not installed in the pipeline and could therefore not be updated") + return False + + sha = self.sha + if module in update_config.get(self.modules_repo.name, {}): + config_entry = update_config[self.modules_repo.name].get(module) + if config_entry is not None and config_entry is not True: + if config_entry is False: + log.info("Module's update entry in '.nf-core.yml' is set to False") + return False + elif isinstance(config_entry, str): + if self.sha: + log.warning( + "Found entry in '.nf-core.yml' for module " + "which will override version specified with '--sha'" + ) + sha = config_entry + else: + log.error("Module's update entry in '.nf-core.yml' is of wrong type") + return False + + # Check that the supplied name is an available module + if module and module not in self.modules_repo.modules_avail_module_names: + log.error("Module '{}' not found in list of available modules.".format(module)) + log.info("Use the command 'nf-core modules list remote' to view available software") + return False + + repos_mods_shas = [(self.modules_repo, module, sha)] + + else: + if module: + raise UserWarning("You cannot specify a module and use the '--all' flag at the same time") + + self.get_pipeline_modules() + + # Filter out modules that should not be updated or assign versions if there are any + skipped_repos = [] + skipped_modules = [] + repos_mods_shas = {} + for
repo_name, modules in self.module_names.items(): + if repo_name not in update_config or update_config[repo_name] is True: + repos_mods_shas[repo_name] = [] + for module in modules: + repos_mods_shas[repo_name].append((module, self.sha)) + elif isinstance(update_config[repo_name], dict): + repo_config = update_config[repo_name] + repos_mods_shas[repo_name] = [] + for module in modules: + if module not in repo_config or repo_config[module] is True: + repos_mods_shas[repo_name].append((module, self.sha)) + elif isinstance(repo_config[module], str): + # If a string is given, it is the commit SHA to which we should update + custom_sha = repo_config[module] + repos_mods_shas[repo_name].append((module, custom_sha)) + else: + # Otherwise the entry must be 'False' and we should ignore the module + skipped_modules.append(f"{repo_name}/{module}") + elif isinstance(update_config[repo_name], str): + # If a string is given, it is the commit SHA to which we should update + custom_sha = update_config[repo_name] + repos_mods_shas[repo_name] = [] + for module in modules: + repos_mods_shas[repo_name].append((module, custom_sha)) + else: + skipped_repos.append(repo_name) + if skipped_repos: + skipped_str = "', '".join(skipped_repos) + log.info(f"Skipping modules in repositor{'y' if len(skipped_repos) == 1 else 'ies'}: '{skipped_str}'") + + if skipped_modules: + skipped_str = "', '".join(skipped_modules) + log.info(f"Skipping module{'' if len(skipped_modules) == 1 else 's'}: '{skipped_str}'") + + repos_mods_shas = [ + (ModulesRepo(repo=repo_name), mods_shas) for repo_name, mods_shas in repos_mods_shas.items() + ] + + for repo, _ in repos_mods_shas: + repo.get_modules_file_tree() + + # Flatten the list + repos_mods_shas = [(repo, mod, sha) for repo, mods_shas in repos_mods_shas for mod, sha in mods_shas] + + # Load 'modules.json' + modules_json = self.load_modules_json() + if not modules_json: + return False + + exit_value = True + for modules_repo, module, sha in repos_mods_shas: + if not module_exist_in_repo(module, modules_repo): + warn_msg = f"Module '{module}' not found in remote '{modules_repo.name}' ({modules_repo.branch})" + if self.update_all: + warn_msg += ". Skipping..."
+ log.warning(warn_msg) + exit_value = False + continue + + if modules_repo.name in modules_json["repos"]: + current_entry = modules_json["repos"][modules_repo.name].get(module) + else: + current_entry = None + + # Set the install folder based on the repository name + install_folder = [modules_repo.owner, modules_repo.repo] + + # Compute the module directory + module_dir = os.path.join(self.dir, "modules", *install_folder, module) + + if sha: + version = sha + elif self.prompt: + try: + version = nf_core.modules.module_utils.prompt_module_version_sha( + module, + modules_repo=modules_repo, + installed_sha=current_entry["git_sha"] if not current_entry is None else None, + ) + except SystemError as e: + log.error(e) + exit_value = False + continue + else: + # Fetch the latest commit for the module + try: + git_log = get_module_git_log(module, modules_repo=modules_repo, per_page=1, page_nbr=1) + except UserWarning: + log.error(f"Was unable to fetch version of module '{module}'") + exit_value = False + continue + version = git_log[0]["git_sha"] + + if current_entry is not None and not self.force: + # Fetch the latest commit for the module + current_version = current_entry["git_sha"] + if current_version == version: + if self.sha or self.prompt: + log.info(f"'{modules_repo.name}/{module}' is already installed at {version}") + else: + log.info(f"'{modules_repo.name}/{module}' is already up to date") + continue + + log.info(f"Updating '{modules_repo.name}/{module}'") + log.debug(f"Updating module '{module}' to {version} from {modules_repo.name}") + + log.debug(f"Removing old version of module '{module}'") + self.clear_module_dir(module, module_dir) + + # Download module files + if not self.download_module_file(module, version, modules_repo, install_folder, module_dir): + exit_value = False + continue + + # Update module.json with newly installed module + self.update_modules_json(modules_json, modules_repo.name, module, version) + return exit_value diff --git a/nf_core/pipeline-template/.editorconfig b/nf_core/pipeline-template/.editorconfig index afb20bb1ec..95549501a2 100644 --- a/nf_core/pipeline-template/.editorconfig +++ b/nf_core/pipeline-template/.editorconfig @@ -11,6 +11,9 @@ indent_style = space [*.{yml,yaml}] indent_size = 2 +[*.json] +insert_final_newline = unset + # These files are edited and tested upstream in nf-core/modules [/modules/nf-core/**] charset = unset diff --git a/nf_core/pipeline-template/.github/CONTRIBUTING.md b/nf_core/pipeline-template/.github/CONTRIBUTING.md index e416773357..bf43ef3fd4 100644 --- a/nf_core/pipeline-template/.github/CONTRIBUTING.md +++ b/nf_core/pipeline-template/.github/CONTRIBUTING.md @@ -19,7 +19,7 @@ If you'd like to write some code for {{ name }}, the standard workflow is as fol * If there isn't one already, please create one so that others know you're working on this 2. [Fork](https://help.github.com/en/github/getting-started-with-github/fork-a-repo) the [{{ name }} repository](https://github.com/{{ name }}) to your GitHub account 3. Make the necessary changes / additions within your forked repository following [Pipeline conventions](#pipeline-contribution-conventions) -4. Use `nf-core schema build .` and add any new parameters to the pipeline JSON schema (requires [nf-core tools](https://github.com/nf-core/tools) >= 1.10). +4. Use `nf-core schema build` and add any new parameters to the pipeline JSON schema (requires [nf-core tools](https://github.com/nf-core/tools) >= 1.10). 5. 
Submit a Pull Request against the `dev` branch and wait for the code to be reviewed and merged If you're not used to this workflow with git, you can start with some [docs from GitHub](https://help.github.com/en/github/collaborating-with-issues-and-pull-requests) or even their [excellent `git` resources](https://try.github.io/). @@ -69,7 +69,7 @@ If you wish to contribute a new step, please use the following coding standards: 2. Write the process block (see below). 3. Define the output channel if needed (see below). 4. Add any new flags/options to `nextflow.config` with a default (see below). -5. Add any new flags/options to `nextflow_schema.json` with help text (with `nf-core schema build .`). +5. Add any new flags/options to `nextflow_schema.json` with help text (with `nf-core schema build`). 6. Add any new flags/options to the help message (for integer/text parameters, print to help the corresponding `nextflow.config` parameter). 7. Add sanity checks for all relevant parameters. 8. Add any new software to the `scrape_software_versions.py` script in `bin/` and the version command to the `scrape_software_versions` process in `main.nf`. @@ -83,7 +83,7 @@ If you wish to contribute a new step, please use the following coding standards: Parameters should be initialised / defined with default values in `nextflow.config` under the `params` scope. -Once there, use `nf-core schema build .` to add to `nextflow_schema.json`. +Once there, use `nf-core schema build` to add to `nextflow_schema.json`. ### Default processes resource requirements diff --git a/nf_core/pipeline-template/.github/PULL_REQUEST_TEMPLATE.md b/nf_core/pipeline-template/.github/PULL_REQUEST_TEMPLATE.md index 1d4271a164..230705bca7 100644 --- a/nf_core/pipeline-template/.github/PULL_REQUEST_TEMPLATE.md +++ b/nf_core/pipeline-template/.github/PULL_REQUEST_TEMPLATE.md @@ -18,7 +18,7 @@ Learn more about contributing: [CONTRIBUTING.md](https://github.com/{{ name }}/t - [ ] If you've fixed a bug or added code that should be tested, add tests! - [ ] If you've added a new tool - have you followed the pipeline conventions in the [contribution docs](https://github.com/{{ name }}/tree/master/.github/CONTRIBUTING.md) - [ ] If necessary, also make a PR on the {{ name }} _branch_ on the [nf-core/test-datasets](https://github.com/nf-core/test-datasets) repository. -- [ ] Make sure your code lints (`nf-core lint .`). +- [ ] Make sure your code lints (`nf-core lint`). - [ ] Ensure the test suite passes (`nextflow run . -profile test,docker`). - [ ] Usage Documentation in `docs/usage.md` is updated. - [ ] Output Documentation in `docs/output.md` is updated. diff --git a/nf_core/pipeline-template/.github/workflows/linting.yml b/nf_core/pipeline-template/.github/workflows/linting.yml index afe4197c38..15c49c860d 100644 --- a/nf_core/pipeline-template/.github/workflows/linting.yml +++ b/nf_core/pipeline-template/.github/workflows/linting.yml @@ -53,7 +53,7 @@ jobs: - uses: actions/setup-node@v1 with: - node-version: "10" + node-version: '10' - name: Install editorconfig-checker run: npm install -g editorconfig-checker diff --git a/nf_core/pipeline-template/README.md b/nf_core/pipeline-template/README.md index c8763e132a..c502f2ea50 100644 --- a/nf_core/pipeline-template/README.md +++ b/nf_core/pipeline-template/README.md @@ -33,7 +33,7 @@ On release, automated continuous integration tests run the pipeline on a full-si ## Quick Start -1. Install [`Nextflow`](https://nf-co.re/usage/installation) (`>=21.04.0`) +1. 
Install [`Nextflow`](https://www.nextflow.io/docs/latest/getstarted.html#installation) (`>=21.04.0`) 2. Install any of [`Docker`](https://docs.docker.com/engine/installation/), [`Singularity`](https://www.sylabs.io/guides/3.0/user-guide/), [`Podman`](https://podman.io/), [`Shifter`](https://nersc.gitlab.io/development/shifter/how-to-use/) or [`Charliecloud`](https://hpc.github.io/charliecloud/) for full pipeline reproducibility _(please only use [`Conda`](https://conda.io/miniconda.html) as a last resort; see [docs](https://nf-co.re/usage/configuration#basic-configuration-profiles))_ diff --git a/nf_core/pipeline-template/bin/scrape_software_versions.py b/nf_core/pipeline-template/bin/scrape_software_versions.py index c55be87266..241dc8b7a6 100755 --- a/nf_core/pipeline-template/bin/scrape_software_versions.py +++ b/nf_core/pipeline-template/bin/scrape_software_versions.py @@ -30,7 +30,7 @@ print("
<dt>{}</dt><dd><samp>{}</samp></dd>
".format(k, v)) print(" ") -# Write out regexes as csv file: +# Write out as tsv file: with open("software_versions.tsv", "w") as f: for k, v in sorted(results.items()): f.write("{}\t{}\n".format(k, v)) diff --git a/nf_core/pipeline-template/lib/NfcoreTemplate.groovy b/nf_core/pipeline-template/lib/NfcoreTemplate.groovy index b6e689ec88..44551e0a35 100755 --- a/nf_core/pipeline-template/lib/NfcoreTemplate.groovy +++ b/nf_core/pipeline-template/lib/NfcoreTemplate.groovy @@ -24,17 +24,21 @@ class NfcoreTemplate { public static void hostName(workflow, params, log) { Map colors = logColours(params.monochrome_logs) if (params.hostnames) { - def hostname = "hostname".execute().text.trim() - params.hostnames.each { prof, hnames -> - hnames.each { hname -> - if (hostname.contains(hname) && !workflow.profile.contains(prof)) { - log.info "=${colors.yellow}====================================================${colors.reset}=\n" + - "${colors.yellow}WARN: You are running with `-profile $workflow.profile`\n" + - " but your machine hostname is ${colors.white}'$hostname'${colors.reset}.\n" + - " ${colors.yellow_bold}Please use `-profile $prof${colors.reset}`\n" + - "=${colors.yellow}====================================================${colors.reset}=" + try { + def hostname = "hostname".execute().text.trim() + params.hostnames.each { prof, hnames -> + hnames.each { hname -> + if (hostname.contains(hname) && !workflow.profile.contains(prof)) { + log.info "=${colors.yellow}====================================================${colors.reset}=\n" + + "${colors.yellow}WARN: You are running with `-profile $workflow.profile`\n" + + " but your machine hostname is ${colors.white}'$hostname'${colors.reset}.\n" + + " ${colors.yellow_bold}Please use `-profile $prof${colors.reset}`\n" + + "=${colors.yellow}====================================================${colors.reset}=" + } } } + } catch (Exception e) { + log.warn "[$workflow.manifest.name] Could not determine 'hostname' - skipping check. Reason: ${e.message}." } } } diff --git a/nf_core/pipeline-template/nextflow_schema.json b/nf_core/pipeline-template/nextflow_schema.json index cb7d707523..aacda0004c 100644 --- a/nf_core/pipeline-template/nextflow_schema.json +++ b/nf_core/pipeline-template/nextflow_schema.json @@ -18,7 +18,7 @@ "type": "string", "format": "file-path", "mimetype": "text/csv", - "pattern": "\\.csv$", + "pattern": "^\\S+\\.csv$", "schema": "assets/schema_input.json", "description": "Path to comma-separated file containing information about the samples in the experiment.", "help_text": "You will need to create a design file with information about the samples in your experiment before running the pipeline. Use this parameter to specify its location. It has to be a comma-separated file with 3 columns, and a header row. See [usage docs](https://nf-co.re/{{ short_name }}/usage#samplesheet-input).", @@ -60,7 +60,7 @@ "type": "string", "format": "file-path", "mimetype": "text/plain", - "pattern": "\\.fn?a(sta)?(\\.gz)?$", + "pattern": "^\\S+\\.fn?a(sta)?(\\.gz)?$", "description": "Path to FASTA genome file.", "help_text": "This parameter is *mandatory* if `--genome` is not specified. If you don't have a BWA index available this will be generated for you automatically. 
Combine with `--save_reference` to save BWA index for future runs.", "fa_icon": "far fa-file-code" diff --git a/nf_core/pipeline-template/workflows/pipeline.nf b/nf_core/pipeline-template/workflows/pipeline.nf index a7138c014e..fe1882b420 100644 --- a/nf_core/pipeline-template/workflows/pipeline.nf +++ b/nf_core/pipeline-template/workflows/pipeline.nf @@ -130,7 +130,9 @@ workflow {{ short_name|upper }} { */ workflow.onComplete { - NfcoreTemplate.email(workflow, params, summary_params, projectDir, log, multiqc_report) + if (params.email || params.email_on_fail) { + NfcoreTemplate.email(workflow, params, summary_params, projectDir, log, multiqc_report) + } NfcoreTemplate.summary(workflow, params, log) } diff --git a/nf_core/utils.py b/nf_core/utils.py index 29ebc9755d..de120f10d7 100644 --- a/nf_core/utils.py +++ b/nf_core/utils.py @@ -771,5 +771,18 @@ def load_tools_config(dir="."): except FileNotFoundError: log.debug(f"No tools config file found: {config_fn}") return {} - + if tools_config is None: + # If the file is empty + return {} return tools_config + + +def sort_dictionary(d): + """Sorts a nested dictionary recursively""" + result = dict() + for k, v in sorted(d.items()): + if isinstance(v, dict): + result[k] = sort_dictionary(v) + else: + result[k] = v + return result diff --git a/setup.py b/setup.py index 4f16b75a60..5be47e737a 100644 --- a/setup.py +++ b/setup.py @@ -2,7 +2,7 @@ from setuptools import setup, find_packages -version = "2.0.1" +version = "2.1" with open("README.md") as f: readme = f.read() diff --git a/tests/modules/lint.py b/tests/modules/lint.py index f96e0af0a5..de29371c58 100644 --- a/tests/modules/lint.py +++ b/tests/modules/lint.py @@ -7,7 +7,7 @@ def test_modules_lint_trimgalore(self): module_lint = nf_core.modules.ModuleLint(dir=self.pipeline_dir) module_lint.lint(print_results=False, module="trimgalore") assert len(module_lint.passed) > 0 - assert len(module_lint.warned) == 0 + assert len(module_lint.warned) >= 0 assert len(module_lint.failed) == 0 diff --git a/tests/test_download.py b/tests/test_download.py index c03cda11f0..a4ae8e205d 100644 --- a/tests/test_download.py +++ b/tests/test_download.py @@ -24,13 +24,13 @@ def test_get_release_hash_release(self): wfs = nf_core.list.Workflows() wfs.get_remote_workflows() pipeline = "methylseq" - download_obj = DownloadWorkflow(pipeline=pipeline, release="1.6") + download_obj = DownloadWorkflow(pipeline=pipeline, revision="1.6") ( download_obj.pipeline, - download_obj.wf_releases, + download_obj.wf_revisions, download_obj.wf_branches, ) = nf_core.utils.get_repo_releases_branches(pipeline, wfs) - download_obj.get_release_hash() + download_obj.get_revision_hash() assert download_obj.wf_sha == "b3e5e3b95aaf01d98391a62a10a3990c0a4de395" assert download_obj.outdir == "nf-core-methylseq-1.6" assert ( @@ -43,13 +43,13 @@ def test_get_release_hash_branch(self): wfs.get_remote_workflows() # Exoseq pipeline is archived, so `dev` branch should be stable pipeline = "exoseq" - download_obj = DownloadWorkflow(pipeline=pipeline, release="dev") + download_obj = DownloadWorkflow(pipeline=pipeline, revision="dev") ( download_obj.pipeline, - download_obj.wf_releases, + download_obj.wf_revisions, download_obj.wf_branches, ) = nf_core.utils.get_repo_releases_branches(pipeline, wfs) - download_obj.get_release_hash() + download_obj.get_revision_hash() assert download_obj.wf_sha == "819cbac792b76cf66c840b567ed0ee9a2f620db7" assert download_obj.outdir == "nf-core-exoseq-dev" assert ( @@ -62,20 +62,20 @@ def 
test_get_release_hash_non_existent_release(self): wfs = nf_core.list.Workflows() wfs.get_remote_workflows() pipeline = "methylseq" - download_obj = DownloadWorkflow(pipeline=pipeline, release="thisisfake") + download_obj = DownloadWorkflow(pipeline=pipeline, revision="thisisfake") ( download_obj.pipeline, - download_obj.wf_releases, + download_obj.wf_revisions, download_obj.wf_branches, ) = nf_core.utils.get_repo_releases_branches(pipeline, wfs) - download_obj.get_release_hash() + download_obj.get_revision_hash() # # Tests for 'download_wf_files' # def test_download_wf_files(self): outdir = tempfile.mkdtemp() - download_obj = DownloadWorkflow(pipeline="nf-core/methylseq", release="1.6") + download_obj = DownloadWorkflow(pipeline="nf-core/methylseq", revision="1.6") download_obj.outdir = outdir download_obj.wf_sha = "b3e5e3b95aaf01d98391a62a10a3990c0a4de395" download_obj.wf_download_url = ( @@ -89,7 +89,7 @@ def test_download_wf_files(self): # def test_download_configs(self): outdir = tempfile.mkdtemp() - download_obj = DownloadWorkflow(pipeline="nf-core/methylseq", release="1.6") + download_obj = DownloadWorkflow(pipeline="nf-core/methylseq", revision="1.6") download_obj.outdir = outdir download_obj.download_configs() assert os.path.exists(os.path.join(outdir, "configs", "nfcore_custom.config")) @@ -106,7 +106,7 @@ def test_wf_use_local_configs(self): create_obj.init_pipeline() test_outdir = tempfile.mkdtemp() - download_obj = DownloadWorkflow(pipeline="dummy", release="1.2.0", outdir=test_outdir) + download_obj = DownloadWorkflow(pipeline="dummy", revision="1.2.0", outdir=test_outdir) shutil.copytree(test_pipeline_dir, os.path.join(test_outdir, "workflow")) download_obj.download_configs() @@ -192,7 +192,7 @@ def test_download_workflow_with_success(self, mock_download_image, mock_singular pipeline="nf-core/methylseq", outdir=os.path.join(tmp_dir, "new"), container="singularity", - release="1.6", + revision="1.6", compress_type="none", ) diff --git a/tests/test_modules.py b/tests/test_modules.py index 7ebaf4e917..2401dfa763 100644 --- a/tests/test_modules.py +++ b/tests/test_modules.py @@ -39,8 +39,8 @@ def setUp(self): # Set up install objects print("Setting up install objects") - self.mods_install = nf_core.modules.ModuleInstall(self.pipeline_dir, latest=True, force=True) - self.mods_install_alt = nf_core.modules.ModuleInstall(self.pipeline_dir, latest=True, force=True) + self.mods_install = nf_core.modules.ModuleInstall(self.pipeline_dir, prompt=False, force=True) + self.mods_install_alt = nf_core.modules.ModuleInstall(self.pipeline_dir, prompt=True, force=True) # TODO Remove comments once the external repository has the same structure as nf-core/modules # self.mods_install_alt.modules_repo = nf_core.modules.ModulesRepo(repo="ewels/nf-core-modules", branch="master")
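
For reference, the per-module filtering added in `nf_core/modules/update.py` above reads an `update` mapping from `.nf-core.yml` via `nf_core.utils.load_tools_config()`, where an entry can be `True` (update), `False` (skip) or a commit SHA string (pin). The following is a minimal illustrative sketch of that resolution logic, not code from nf-core/tools: the repository key mirrors `nf-core/modules`, but the module names and the helper `resolve_update_sha` are hypothetical examples.

```python
# Minimal sketch (not part of nf-core/tools): how a per-module 'update' entry from
# '.nf-core.yml' could be resolved, mirroring the precedence used by ModuleUpdate above.
# Module names and the pinned SHA below are hypothetical examples.

update_config = {
    "nf-core/modules": {
        "fastqc": True,    # keep updating to the latest commit
        "multiqc": False,  # exclude from 'nf-core modules update --all'
        "star/align": "0123456789abcdef0123456789abcdef01234567",  # pin to a (hypothetical) commit SHA
    },
}


def resolve_update_sha(update_config, repo_name, module, default_sha=None):
    """Return (skip, sha) for one module: skip it, or update it to 'sha' (None means latest)."""
    repo_entry = update_config.get(repo_name, True)
    if repo_entry is False:
        return True, None  # whole repository excluded from updates
    if isinstance(repo_entry, str):
        return False, repo_entry  # every module in the repository pinned to one SHA
    if isinstance(repo_entry, dict):
        module_entry = repo_entry.get(module, True)
        if module_entry is False:
            return True, None  # this module excluded from updates
        if isinstance(module_entry, str):
            return False, module_entry  # this module pinned to a specific SHA
    return False, default_sha  # fall back to '--sha' or the latest commit


if __name__ == "__main__":
    for mod in ("fastqc", "multiqc", "star/align"):
        print(mod, resolve_update_sha(update_config, "nf-core/modules", mod))
```

As in the diff above, a string entry in the config takes precedence over a version passed with `--sha` (a warning is logged), and repositories or modules that resolve to "skip" are reported before the update loop runs.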