diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md index 71411be1b9..9dbd7a1f6b 100644 --- a/.github/PULL_REQUEST_TEMPLATE.md +++ b/.github/PULL_REQUEST_TEMPLATE.md @@ -6,7 +6,7 @@ These are the most common things requested on pull requests (PRs). Remember that PRs should be made against the dev branch, unless you're preparing a release. -Learn more about contributing: https://github.com/nf-core/tools/tree/master/.github/CONTRIBUTING.md +Learn more about contributing: https://github.com/nf-core/tools/tree/main/.github/CONTRIBUTING.md --> ## PR checklist diff --git a/.github/RELEASE_CHECKLIST.md b/.github/RELEASE_CHECKLIST.md index 9a1905c7a0..d64799382f 100644 --- a/.github/RELEASE_CHECKLIST.md +++ b/.github/RELEASE_CHECKLIST.md @@ -6,14 +6,13 @@ 4. Check that modules/subworkflows in template are up to date with the latest releases 5. Create a PR to `dev` to bump the version in `CHANGELOG.md` and `setup.py` and change the gitpod container to `nfcore/gitpod:latest`. 6. Make sure all CI tests are passing! -7. Create a PR from `dev` to `master` -8. Make sure all CI tests are passing again (additional tests are run on PRs to `master`) +7. Create a PR from `dev` to `main` +8. Make sure all CI tests are passing again (additional tests are run on PRs to `main`) 9. Request review (2 approvals required) -10. Run `rich-codex` to regenerate docs screengrabs (actions `workflow_dispatch` button) -11. Merge the PR into `master` -12. Wait for CI tests on the commit to passed -13. (Optional but a good idea) Run a manual sync on `nf-core/testpipeline` and check that CI is passing on the resulting PR. -14. Create a new release copying the `CHANGELOG` for that release into the description section. +10. Merge the PR into `main` +11. Wait for CI tests on the commit to pass +12. (Optional but a good idea) Run a manual sync on `nf-core/testpipeline` and check that CI is passing on the resulting PR. +13. Create a new release copying the `CHANGELOG` for that release into the description section. ## After release @@ -21,3 +20,4 @@ 2. Check that the automatic `PyPi` deployment has worked: [pypi.org/project/nf-core](https://pypi.org/project/nf-core/) 3. Check `BioConda` has an automated PR to bump the version, and merge. eg. [bioconda/bioconda-recipes #20065](https://github.com/bioconda/bioconda-recipes/pull/20065) 4. Create a tools PR to `dev` to bump back to the next development version in `CHANGELOG.md` and `setup.py` and change the gitpod container to `nfcore/gitpod:dev`. +5. 
Run `rich-codex` on the [tools/website repo](https://github.com/nf-core/website/actions/workflows/rich-codex.yml) to regenerate docs screengrabs (actions `workflow_dispatch` button) diff --git a/.github/actions/create-lint-wf/action.yml b/.github/actions/create-lint-wf/action.yml index 8760901db1..dc24546146 100644 --- a/.github/actions/create-lint-wf/action.yml +++ b/.github/actions/create-lint-wf/action.yml @@ -53,12 +53,24 @@ runs: run: find nf-core-testpipeline -type f -exec sed -i '/TODO nf-core:/d' {} \; working-directory: create-lint-wf + # Uncomment includeConfig statement + - name: uncomment include config + shell: bash + run: find nf-core-testpipeline -type f -exec sed -i 's/\/\/ includeConfig/includeConfig/' {} \; + working-directory: create-lint-wf + # Replace zenodo.XXXXXX to pass readme linting - name: replace zenodo.XXXXXX shell: bash run: find nf-core-testpipeline -type f -exec sed -i 's/zenodo.XXXXXX/zenodo.123456/g' {} \; working-directory: create-lint-wf + # Add empty ro-crate file + - name: add empty ro-crate file + shell: bash + run: touch nf-core-testpipeline/ro-crate-metadata.json + working-directory: create-lint-wf + # Run nf-core pipelines linting - name: nf-core pipelines lint shell: bash diff --git a/.github/workflows/branch.yml b/.github/workflows/branch.yml index bbac1cc6ff..9775c70e10 100644 --- a/.github/workflows/branch.yml +++ b/.github/workflows/branch.yml @@ -1,15 +1,15 @@ name: nf-core branch protection -# This workflow is triggered on PRs to master branch on the repository -# It fails when someone tries to make a PR against the nf-core `master` branch instead of `dev` +# This workflow is triggered on PRs to main branch on the repository +# It fails when someone tries to make a PR against the nf-core `main` branch instead of `dev` on: pull_request_target: - branches: [master] + branches: [main] jobs: test: runs-on: ubuntu-latest steps: - # PRs to the nf-core repo master branch are only ok if coming from the nf-core repo `dev` or any `patch` branches + # PRs to the nf-core repo main branch are only ok if coming from the nf-core repo `dev` or any `patch` branches - name: Check PRs if: github.repository == 'nf-core/tools' run: | @@ -21,7 +21,7 @@ jobs: uses: mshick/add-pr-comment@b8f338c590a895d50bcbfa6c5859251edc8952fc # v2 with: message: | - ## This PR is against the `master` branch :x: + ## This PR is against the `main` branch :x: * Do not close this PR * Click _Edit_ and change the `base` to `dev` @@ -31,9 +31,9 @@ jobs: Hi @${{ github.event.pull_request.user.login }}, - It looks like this pull-request is has been made against the [${{github.event.pull_request.head.repo.full_name }}](https://github.com/${{github.event.pull_request.head.repo.full_name }}) `master` branch. - The `master` branch on nf-core repositories should always contain code from the latest release. + It looks like this pull-request has been made against the [${{github.event.pull_request.head.repo.full_name }}](https://github.com/${{github.event.pull_request.head.repo.full_name }}) `main` branch. + The `main` branch on nf-core repositories should always contain code from the latest release. 
+ Because of this, PRs to `main` are only allowed if they come from the [${{github.event.pull_request.head.repo.full_name }}](https://github.com/${{github.event.pull_request.head.repo.full_name }}) `dev` branch. You do not need to close this PR, you can change the target branch to `dev` by clicking the _"Edit"_ button at the top of this page. Note that even after this, the test will continue to show as failing until you push a new commit. diff --git a/.github/workflows/create-lint-wf.yml b/.github/workflows/create-lint-wf.yml index 7e90febb74..e0b4c67cfc 100644 --- a/.github/workflows/create-lint-wf.yml +++ b/.github/workflows/create-lint-wf.yml @@ -38,7 +38,7 @@ jobs: strategy: matrix: NXF_VER: - - "23.04.0" + - "24.04.2" - "latest-everything" steps: - name: go to subdirectory and change nextflow workdir @@ -78,6 +78,11 @@ jobs: run: find nf-core-testpipeline -type f -exec sed -i '/TODO nf-core:/d' {} \; working-directory: create-lint-wf + # Uncomment includeConfig statement + - name: uncomment include config + run: find nf-core-testpipeline -type f -exec sed -i 's/\/\/ includeConfig/includeConfig/' {} \; + working-directory: create-lint-wf + # Run the other nf-core commands - name: nf-core pipelines list run: nf-core --log-file log.txt pipelines list diff --git a/.github/workflows/create-test-lint-wf-template.yml b/.github/workflows/create-test-lint-wf-template.yml index 85ce2230ab..3a27cd5e4e 100644 --- a/.github/workflows/create-test-lint-wf-template.yml +++ b/.github/workflows/create-test-lint-wf-template.yml @@ -44,7 +44,7 @@ jobs: - name: Create Matrix id: create_matrix run: | - echo "matrix=$(yq 'keys | filter(. != "github") | filter(. != "is_nfcore") | filter(. != "test_config") | tojson(0)' nf_core/pipelines/create/templatefeatures.yml)" >> $GITHUB_OUTPUT + echo "matrix=$(yq 'keys | filter(. != "github") | filter(. != "is_nfcore") | filter(. 
!= "test_config") | tojson(0)' nf_core/pipelines/create/template_features.yml)" >> $GITHUB_OUTPUT RunTestWorkflow: runs-on: ${{ matrix.runner }} @@ -112,6 +112,9 @@ jobs: run: | cd create-test-lint-wf nf-core --log-file log.txt pipelines create -n testpipeline -d "This pipeline is for testing" -a "Testing McTestface" --template-yaml template_skip_${{ matrix.TEMPLATE }}.yml + # fake ro-crate + touch my-prefix-testpipeline/ro-crate-metadata.json + git commit -am "add ro-crate" - name: run the pipeline run: | @@ -137,11 +140,21 @@ jobs: run: find my-prefix-testpipeline -type f -exec sed -i '/TODO nf-core:/d' {} \; working-directory: create-test-lint-wf + # Uncomment includeConfig statement + - name: uncomment include config + run: find my-prefix-testpipeline -type f -exec sed -i 's/\/\/ includeConfig/includeConfig/' {} \; + working-directory: create-test-lint-wf + # Replace zenodo.XXXXXX to pass readme linting - name: replace zenodo.XXXXXX run: find my-prefix-testpipeline -type f -exec sed -i 's/zenodo.XXXXXX/zenodo.123456/g' {} \; working-directory: create-test-lint-wf + # Add empty ro-crate file + - name: add empty ro-crate file + run: touch my-prefix-testpipeline/ro-crate-metadata.json + working-directory: create-test-lint-wf + # Run nf-core linting - name: nf-core pipelines lint run: nf-core --log-file log.txt --hide-progress pipelines lint --dir my-prefix-testpipeline --fail-warned diff --git a/.github/workflows/create-test-wf.yml b/.github/workflows/create-test-wf.yml index a95a477459..782a08ac9f 100644 --- a/.github/workflows/create-test-wf.yml +++ b/.github/workflows/create-test-wf.yml @@ -39,7 +39,7 @@ jobs: strategy: matrix: NXF_VER: - - "23.04.0" + - "24.04.2" - "latest-everything" steps: - name: go to working directory @@ -71,11 +71,7 @@ jobs: mkdir create-test-wf && cd create-test-wf export NXF_WORK=$(pwd) nf-core --log-file log.txt pipelines create -n testpipeline -d "This pipeline is for testing" -a "Testing McTestface" - # echo current directory - pwd - # echo content of current directory - ls -la - nextflow run nf-core-testpipeline -profile test,self_hosted_runner --outdir ./results + nextflow run nf-core-testpipeline -profile self_hosted_runner,test --outdir ./results - name: Upload log file artifact if: ${{ always() }} diff --git a/.github/workflows/pytest.yml b/.github/workflows/pytest.yml index dc88031886..ae2df47e61 100644 --- a/.github/workflows/pytest.yml +++ b/.github/workflows/pytest.yml @@ -49,7 +49,7 @@ jobs: steps: - name: Check conditions id: conditions - run: echo "run-tests=${{ github.ref == 'refs/heads/master' || (matrix.runner == 'ubuntu-20.04' && matrix.python-version == '3.8') }}" >> "$GITHUB_OUTPUT" + run: echo "run-tests=${{ github.ref == 'refs/heads/main' || (matrix.runner == 'ubuntu-20.04' && matrix.python-version == '3.8') }}" >> "$GITHUB_OUTPUT" outputs: python-version: ${{ matrix.python-version }} @@ -142,18 +142,18 @@ jobs: exit 1 fi + - name: remove slashes from test name + run: | + test=$(echo ${{ matrix.test }} | sed 's/\//__/g') + echo "test=${test}" >> $GITHUB_ENV + - name: Store snapshot report uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808 # v4 if: always() with: - name: Snapshot Report ${{ matrix.test }} + name: Snapshot Report ${{ env.test }} path: ./snapshot_report.html - - name: remove slashes from test name - run: | - test=$(echo ${{ matrix.test }} | sed 's/\//__/g') - echo "test=${test}" >> $GITHUB_ENV - - name: Upload coverage uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808 # v4 with: diff 
--git a/.github/workflows/rich-codex.yml b/.github/workflows/rich-codex.yml deleted file mode 100644 index cd12b139d3..0000000000 --- a/.github/workflows/rich-codex.yml +++ /dev/null @@ -1,39 +0,0 @@ -name: Generate images for docs -on: - workflow_dispatch: -jobs: - rich_codex: - runs-on: ubuntu-latest - steps: - - name: Check out the repo - uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b # v4 - - name: Set up Python - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d # v5 - with: - python-version: 3.x - cache: pip - cache-dependency-path: setup.py - - - name: Install Nextflow - uses: nf-core/setup-nextflow@v2 - - - name: Install nf-test - uses: nf-core/setup-nf-test@v1 - - - name: Install nf-core/tools - run: pip install git+https://github.com/nf-core/tools.git@dev - - - name: Generate terminal images with rich-codex - uses: ewels/rich-codex@8ce988cc253c240a3027ba58e33e47640935dd8b # v1 - env: - COLUMNS: 100 - HIDE_PROGRESS: "true" - with: - commit_changes: "true" - clean_img_paths: docs/images/*.svg - terminal_width: 100 - before_command: > - which nextflow && - which nf-core && - nextflow -version && - nf-core --version diff --git a/.github/workflows/sync.yml b/.github/workflows/sync.yml index 55880e8130..ea815a219a 100644 --- a/.github/workflows/sync.yml +++ b/.github/workflows/sync.yml @@ -1,7 +1,7 @@ name: Sync template on: - release: - types: [published] + # release: + # types: [published] workflow_dispatch: inputs: testpipeline: @@ -19,6 +19,10 @@ on: description: "Force a PR to be created" type: boolean default: false + pipeline: + description: "Pipeline to sync" + type: string + default: "all" # Cancel if a newer run is started concurrency: @@ -35,6 +39,14 @@ jobs: run: | if [ "${{ github.event.inputs.testpipeline }}" == "true" ]; then echo '{"pipeline":["testpipeline"]}' > pipeline_names.json + elif [ "${{ github.event.inputs.pipeline }}" != "all" ] && [ "${{ github.event.inputs.pipeline }}" != "" ]; then + curl -O https://nf-co.re/pipeline_names.json + # check if the pipeline exists + if ! 
grep -q "\"${{ github.event.inputs.pipeline }}\"" pipeline_names.json; then + echo "Pipeline ${{ github.event.inputs.pipeline }} does not exist" + exit 1 + fi + echo '{"pipeline":["${{ github.event.inputs.pipeline }}"]}' > pipeline_names.json else curl -O https://nf-co.re/pipeline_names.json fi @@ -82,7 +94,7 @@ run: | git config --global user.email "core@nf-co.re" git config --global user.name "nf-core-bot" - nf-core --log-file sync_log_${{ matrix.pipeline }}.txt sync -d nf-core/${{ matrix.pipeline }} \ + nf-core --log-file sync_log_${{ matrix.pipeline }}.txt pipelines sync -d nf-core/${{ matrix.pipeline }} \ --from-branch dev \ --pull-request \ --username nf-core-bot \ diff --git a/.github/workflows/update-textual-snapshots.yml b/.github/workflows/update-textual-snapshots.yml index fb936762f8..2adb1b8d81 100644 --- a/.github/workflows/update-textual-snapshots.yml +++ b/.github/workflows/update-textual-snapshots.yml @@ -46,7 +46,7 @@ jobs: - name: Run pytest to update snapshots id: pytest run: | - python3 -m pytest tests/test_create_app.py --snapshot-update --color=yes --durations=0 + python3 -m pytest tests/pipelines/test_create_app.py --snapshot-update --color=yes --durations=0 continue-on-error: true # indication that the run has finished diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 4f08d8419d..67aa3204c4 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,6 +1,6 @@ repos: - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.6.0 + rev: v0.6.9 hooks: - id: ruff # linter args: [--fix, --exit-non-zero-on-fix] # sort imports and fix @@ -13,13 +13,13 @@ repos: - prettier@3.3.3 - repo: https://github.com/editorconfig-checker/editorconfig-checker.python - rev: "2.7.3" + rev: "3.0.3" hooks: - id: editorconfig-checker alias: ec - repo: https://github.com/pre-commit/mirrors-mypy - rev: "v1.11.1" + rev: "v1.11.2" hooks: - id: mypy additional_dependencies: diff --git a/CHANGELOG.md b/CHANGELOG.md index 99e2f2427d..364a079a70 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,6 +1,55 @@ # nf-core/tools: Changelog -## v3.0.0dev +## v3.0.3dev + +### Template + +- Keep pipeline name in version.yml file ([#3223](https://github.com/nf-core/tools/pull/3223)) +- Fix Manifest DOI text ([#3224](https://github.com/nf-core/tools/pull/3224)) +- Do not assume pipeline name is a URL ([#3225](https://github.com/nf-core/tools/pull/3225)) + +### Download + +### Linting + +### Modules + +### Subworkflows + +### General + +- Include .nf-core.yml in `nf-core pipelines bump-version` ([#3220](https://github.com/nf-core/tools/pull/3220)) +- create: add shortcut to toggle all switches ([#3226](https://github.com/nf-core/tools/pull/3226)) + +## [v3.0.2 - Titanium Tapir Patch](https://github.com/nf-core/tools/releases/tag/3.0.2) - [2024-10-11] + +### Template + +- Add null/ to .gitignore ([#3191](https://github.com/nf-core/tools/pull/3191)) +- Parallelize pipeline GHA tests over docker/conda/singularity ([#3214](https://github.com/nf-core/tools/pull/3214)) +- Fix `template_version_comment.yml` github action ([#3212](https://github.com/nf-core/tools/pull/3212)) +- Fix pre-commit linting on pipeline template ([#3218](https://github.com/nf-core/tools/pull/3218)) + +### Linting + +- Fix bug when linting schema params and when using `defaultIgnoreParams` ([#3213](https://github.com/nf-core/tools/pull/3213)) + +### General + +- Use updated pipeline commands in docstrings ([#3215](https://github.com/nf-core/tools/pull/3215)) +- Disable automatic sync on release, fix 
handling empty pipeline input ([#3217](https://github.com/nf-core/tools/pull/3217)) + +## [v3.0.1 - Titanium Tapir Patch](https://github.com/nf-core/tools/releases/tag/3.0.1) - [2024-10-09] + +### Template + +- Fixed an issue where the linting CI action didn't read the correct file ([#3202](https://github.com/nf-core/tools/pull/3202)) +- Fixed condition for `awsfulltest` to run ([#3203](https://github.com/nf-core/tools/pull/3203)) +- Fix too many empty lines added by jinja ([#3204](https://github.com/nf-core/tools/pull/3204) and [#3206](https://github.com/nf-core/tools/pull/3206)) +- Fix header blocks in local subworkflow including git merge marker-like strings ([#3201](https://github.com/nf-core/tools/pull/3201)) +- Update included subworkflows and modules ([#3208](https://github.com/nf-core/tools/pull/3208)) + +## [v3.0.0 - Titanium Tapir](https://github.com/nf-core/tools/releases/tag/3.0.0) - [2024-10-08] **Highlights** @@ -9,10 +58,10 @@ - A new Text User Interface app when running `nf-core pipelines create` to help us guide you through the process better (no worries, you can still use the cli if you give all values as parameters) - We replaced nf-validation with nf-schema in the pipeline template - CI tests now lint with the nf-core tools version matching the template version of the pipeline, to minimise errors in opened PRs with every new tools release. -- New command `nf-core pipelines ro-crate` to create a [Research Object (RO) crate](https://www.researchobject.org/ro-crate/) for a pipeline - `nf-core licences` command is deprecated. -- The structure of nf-core/tools pytests has been updated -- The structure of the API docs has been updated +- Changed default branch to `main`. +- The structure of nf-core/tools pytests has been updated. +- The structure of the API docs has been updated. 
### Template @@ -23,17 +72,27 @@ - Remove release announcement for non nf-core pipelines ([#3072](https://github.com/nf-core/tools/pull/3072)) - handle template features with a yaml file ([#3108](https://github.com/nf-core/tools/pull/3108), [#3112](https://github.com/nf-core/tools/pull/3112)) - add option to exclude code linters for custom pipeline template ([#3084](https://github.com/nf-core/tools/pull/3084)) -- add option to exclude citations for custom pipeline template ([#3101](https://github.com/nf-core/tools/pull/3101)) +- add option to exclude citations for custom pipeline template ([#3101](https://github.com/nf-core/tools/pull/3101) and [#3169](https://github.com/nf-core/tools/pull/3169)) - add option to exclude gitpod for custom pipeline template ([#3100](https://github.com/nf-core/tools/pull/3100)) - add option to exclude codespaces from pipeline template ([#3105](https://github.com/nf-core/tools/pull/3105)) - add option to exclude multiqc from pipeline template ([#3103](https://github.com/nf-core/tools/pull/3103)) - add option to exclude changelog from custom pipeline template ([#3104](https://github.com/nf-core/tools/pull/3104)) - add option to exclude license from pipeline template ([#3125](https://github.com/nf-core/tools/pull/3125)) - add option to exclude email from pipeline template ([#3126](https://github.com/nf-core/tools/pull/3126)) +- add option to exclude nf-schema from the template ([#3116](https://github.com/nf-core/tools/pull/3116)) - add option to exclude fastqc from pipeline template ([#3129](https://github.com/nf-core/tools/pull/3129)) - add option to exclude documentation from pipeline template ([#3130](https://github.com/nf-core/tools/pull/3130)) - add option to exclude test configs from pipeline template ([#3133](https://github.com/nf-core/tools/pull/3133)) - add option to exclude tower.yml from pipeline template ([#3134](https://github.com/nf-core/tools/pull/3134)) +- Use nf-schema instead of nf-validation ([#3116](https://github.com/nf-core/tools/pull/3116)) +- test pipeline with conda and singularity on PRs to master ([#3149](https://github.com/nf-core/tools/pull/3149)) +- run nf-core lint `--release` on PRs to master ([#3148](https://github.com/nf-core/tools/pull/3148)) +- Add tests to ensure all files are part of a template customisation group and all groups are tested ([#3099](https://github.com/nf-core/tools/pull/3099)) +- Update the syntax of `utils_nfcore_pipeline_pipeline` local subworkflow ([#3166](https://github.com/nf-core/tools/pull/3166)) +- Remove if/else block to include `igenomes.config` ([#3168](https://github.com/nf-core/tools/pull/3168)) +- Fixed release announcement hashtags for Mastodon ([#3176](https://github.com/nf-core/tools/pull/3176)) +- Remove try/catch blocks from `nextflow.config` ([#3167](https://github.com/nf-core/tools/pull/3167)) +- Extend `download_pipeline.yml` to count pre-downloaded container images. ([#3182](https://github.com/nf-core/tools/pull/3182)) ### Linting @@ -42,25 +101,44 @@ - Remove defaults from conda `environment.yml` file. 
([#3029](https://github.com/nf-core/tools/pull/3029)) - Restructure pipeline tests and move pipeline linting into subfolder ([#3070](https://github.com/nf-core/tools/pull/3070)) - Fix module linting warning for process_high_memory ([#3086](https://github.com/nf-core/tools/issues/3086)) +- Linting will now fail when an unpinned plugin is used ([#3116](https://github.com/nf-core/tools/pull/3116)) +- Linting will now check if the schema is correct for the used validation plugin ([#3116](https://github.com/nf-core/tools/pull/3116)) +- Linting will now check the use of the right validation plugin include statements in the workflow scripts ([#3116](https://github.com/nf-core/tools/pull/3116)) +- Full linting for correct use of nf-schema and nf-validation ([#3116](https://github.com/nf-core/tools/pull/3116)) +- Handle cases where the directory path contains the name of the component ([#3147](https://github.com/nf-core/tools/pull/3147)) +- Don't test conda `environment.yml` `name` attribute (which should no longer be there) ([#3161](https://github.com/nf-core/tools/pull/3161)) ### Pipeline create command -- Create: allow more special characters on the pipeline name for non-nf-core pipelines ([#3008](https://github.com/nf-core/tools/pull/3008)) -- Create: Mock git cretentials to generate stable textual snapshots ([#3007](https://github.com/nf-core/tools/pull/3007)) -- Create app: display input textbox with equally spaced grid ([#3038](https://github.com/nf-core/tools/pull/3038)) -- Pipelines: allow numbers in custom pipeline name ([#3094](https://github.com/nf-core/tools/pull/3094)) +- Allow more special characters on the pipeline name for non-nf-core pipelines ([#3008](https://github.com/nf-core/tools/pull/3008)) +- Mock git credentials to generate stable textual snapshots ([#3007](https://github.com/nf-core/tools/pull/3007)) +- Display input textbox with equally spaced grid ([#3038](https://github.com/nf-core/tools/pull/3038)) +- Allow numbers in custom pipeline name ([#3094](https://github.com/nf-core/tools/pull/3094)) ### Components - The `modules_nfcore` tag in the `main.nf.test` file of modules/subworkflows now displays the organization name in custom modules repositories ([#3005](https://github.com/nf-core/tools/pull/3005)) - Add `--migrate_pytest` option to `nf-core test` command ([#3085](https://github.com/nf-core/tools/pull/3085)) -- Components: allow spaces at the beginning of include statements ([#3115](https://github.com/nf-core/tools/pull/3115)) +- Allow spaces at the beginning of include statements ([#3115](https://github.com/nf-core/tools/pull/3115)) +- Add option `--fix` to update the `meta.yml` file of subworkflows ([#3077](https://github.com/nf-core/tools/pull/3077)) + +### Download + +- Fully removed the already deprecated `-t` / `--tower` flag. +- Refactored the CLI for consistency (short flag is usually second word, e.g. 
also for `--container-library` etc.): + +| Old parameter | New parameter | | --------------------------------- | --------------------------------- | | `-d` / `--download-configuration` | `-c` / `--download-configuration` | | `-p` / `--parallel-downloads` | `-d` / `--parallel-downloads` | | new parameter | `-p` / (`--platform`) | ### General +- Change default branch to `main` for the nf-core/tools repository - Update output of generation script for API docs to new structure ([#2988](https://github.com/nf-core/tools/pull/2988)) +- Remove `rich-codex.yml` action, images are now generated on the website repo ([#2989](https://github.com/nf-core/tools/pull/2989)) - Add no clobber and put bash options on their own line ([#2991](https://github.com/nf-core/tools/pull/2991)) -- update minimal textual version and snapshots ([#2998](https://github.com/nf-core/tools/pull/2998)) - move pipeline subcommands for v3.0 ([#2983](https://github.com/nf-core/tools/pull/2983)) - return directory if base_dir is the root directory ([#3003](https://github.com/nf-core/tools/pull/3003)) - Remove nf-core licences command ([#3012](https://github.com/nf-core/tools/pull/3012)) @@ -74,6 +152,8 @@ - Fix number of arguments for pipelines_create within the command_create function ([#3074](https://github.com/nf-core/tools/pull/3074)) - Add bot action to update textual snapshots and write bot documentation ([#3102](https://github.com/nf-core/tools/pull/3102)) - Update gitpod setup ([#3136](https://github.com/nf-core/tools/pull/3136)) +- fix syncing a pipeline from current directory ([#3143](https://github.com/nf-core/tools/pull/3143)) +- Patch gitpod conda setup to not use defaults channel ([#3159](https://github.com/nf-core/tools/pull/3159)) ## Version updates @@ -82,6 +162,7 @@ - Update python:3.12-slim Docker digest to 59c7332 ([#3124](https://github.com/nf-core/tools/pull/3124)) - Update pre-commit hook pre-commit/mirrors-mypy to v1.11.1 ([#3091](https://github.com/nf-core/tools/pull/3091)) - Update to pytest v8 and move it to dev dependencies ([#3058](https://github.com/nf-core/tools/pull/3058)) +- Update minimal textual version and snapshots ([#2998](https://github.com/nf-core/tools/pull/2998)) ## [v2.14.1 - Tantalum Toad - Patch](https://github.com/nf-core/tools/releases/tag/2.14.1) - [2024-05-09] diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index f9773296c1..ce36354331 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -153,7 +153,7 @@ Optionally followed by the description that you want to add to the changelog. - Update Textual snapshots: -If the Textual snapshots (run by `tests/test_crate_app.py`) fail, an HTML report is generated and uploaded as an artifact. +If the Textual snapshots (run by `tests/pipelines/test_create_app.py`) fail, an HTML report is generated and uploaded as an artifact. 
If you are sure that these changes are correct, you can automatically update the snapshots from the PR by posting a comment with the magic words: ``` diff --git a/Dockerfile b/Dockerfile index fb1a867937..8269e95702 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,4 +1,4 @@ -FROM python:3.12-slim@sha256:59c7332a4a24373861c4a5f0eec2c92b87e3efeb8ddef011744ef9a751b1d11c +FROM python:3.12-slim@sha256:af4e85f1cac90dd3771e47292ea7c8a9830abfabbe4faa5c53f158854c2e819d LABEL authors="phil.ewels@seqera.io,erik.danielsson@scilifelab.se" \ description="Docker image containing requirements for nf-core/tools" diff --git a/MANIFEST.in b/MANIFEST.in index 2bec403804..ce2e08c090 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -10,4 +10,4 @@ include nf_core/assets/logo/nf-core-repo-logo-base-darkbg.png include nf_core/assets/logo/placeholder_logo.svg include nf_core/assets/logo/MavenPro-Bold.ttf include nf_core/pipelines/create/create.tcss -include nf_core/pipelines/create/templatefeatures.yml +include nf_core/pipelines/create/template_features.yml diff --git a/README.md b/README.md index 58fb708a0d..8a3e7d05e6 100644 --- a/README.md +++ b/README.md @@ -1,12 +1,12 @@

- - nf-core/tools + + nf-core/tools

-[![Python tests](https://github.com/nf-core/tools/workflows/Python%20tests/badge.svg?branch=master&event=push)](https://github.com/nf-core/tools/actions?query=workflow%3A%22Python+tests%22+branch%3Amaster) -[![codecov](https://codecov.io/gh/nf-core/tools/branch/master/graph/badge.svg)](https://codecov.io/gh/nf-core/tools) +[![Python tests](https://github.com/nf-core/tools/workflows/Python%20tests/badge.svg?branch=main&event=push)](https://github.com/nf-core/tools/actions?query=workflow%3A%22Python+tests%22+branch%3Amain) +[![codecov](https://codecov.io/gh/nf-core/tools/branch/main/graph/badge.svg)](https://codecov.io/gh/nf-core/tools) [![code style: prettier](https://img.shields.io/badge/code%20style-prettier-ff69b4.svg)](https://github.com/prettier/prettier) [![code style: Ruff](https://img.shields.io/endpoint?url=https://raw.githubusercontent.com/charliermarsh/ruff/main/assets/badge/v1.json)](https://github.com/charliermarsh/ruff) @@ -21,7 +21,7 @@ For documentation of the internal Python functions, please refer to the [Tools P ## Installation -For full installation instructions, please see the [nf-core documentation](https://nf-co.re/docs/usage/tools). +For full installation instructions, please see the [nf-core documentation](https://nf-co.re/docs/nf-core-tools/installation). Below is a quick-start for those who know what they're doing: ### Bioconda diff --git a/docs/api/_src/api/utils.md b/docs/api/_src/api/utils.md new file mode 100644 index 0000000000..1353f97ef5 --- /dev/null +++ b/docs/api/_src/api/utils.md @@ -0,0 +1,9 @@ +# nf_core.utils + +```{eval-rst} +.. automodule:: nf_core.utils + :members: + :undoc-members: + :show-inheritance: + :private-members: +``` diff --git a/docs/api/_src/conf.py b/docs/api/_src/conf.py index bfdbd7888d..5a45483d9c 100644 --- a/docs/api/_src/conf.py +++ b/docs/api/_src/conf.py @@ -40,7 +40,7 @@ # Add any Sphinx extension module names here, as strings. They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom # ones. -extensions = ["myst_parser", "sphinx.ext.autodoc", "sphinx.ext.napoleon"] +extensions = ["myst_parser", "sphinx.ext.autodoc", "sphinx.ext.napoleon", "sphinxcontrib.autodoc_pydantic"] # Add any paths that contain templates here, relative to this directory. templates_path = ["./_templates"] @@ -51,8 +51,8 @@ # source_suffix = ['.rst', '.md'] source_suffix = ".rst" -# The master toctree document. -master_doc = "index" +# The main toctree document. +main_doc = "index" # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. @@ -133,7 +133,7 @@ # (source start file, target name, title, # author, documentclass [howto, manual, or own class]). latex_documents = [ - (master_doc, "nf-core.tex", "nf-core tools API documentation", "Phil Ewels, Sven Fillinger", "manual"), + (main_doc, "nf-core.tex", "nf-core tools API documentation", "Phil Ewels, Sven Fillinger", "manual"), ] @@ -141,7 +141,7 @@ # One entry per manual page. List of tuples # (source start file, name, description, authors, manual section). 
-man_pages = [(master_doc, "nf-core", "nf-core tools API documentation", [author], 1)] +man_pages = [(main_doc, "nf-core", "nf-core tools API documentation", [author], 1)] # -- Options for Texinfo output ---------------------------------------------- @@ -151,7 +151,7 @@ # dir menu entry, description, category) texinfo_documents = [ ( - master_doc, + main_doc, "nf-core", "nf-core tools API documentation", author, diff --git a/docs/api/_src/pipeline_lint_tests/included_configs.md b/docs/api/_src/pipeline_lint_tests/included_configs.md new file mode 100644 index 0000000000..f68f7da25e --- /dev/null +++ b/docs/api/_src/pipeline_lint_tests/included_configs.md @@ -0,0 +1,5 @@ +# included_configs + + ```{eval-rst} + .. automethod:: nf_core.pipelines.lint.PipelineLint.included_configs + ``` diff --git a/docs/api/_src/pipeline_lint_tests/index.md b/docs/api/_src/pipeline_lint_tests/index.md index 3575c08db4..4dd93442d2 100644 --- a/docs/api/_src/pipeline_lint_tests/index.md +++ b/docs/api/_src/pipeline_lint_tests/index.md @@ -7,6 +7,7 @@ - [base_config](./base_config/) - [files_exist](./files_exist/) - [files_unchanged](./files_unchanged/) + - [included_configs](./included_configs/) - [merge_markers](./merge_markers/) - [modules_config](./modules_config/) - [modules_json](./modules_json/) @@ -16,6 +17,7 @@ - [nfcore_yml](./nfcore_yml/) - [pipeline_name_conventions](./pipeline_name_conventions/) - [pipeline_todos](./pipeline_todos/) + - [plugin_includes](./plugin_includes/) - [readme](./readme/) - [schema_description](./schema_description/) - [schema_lint](./schema_lint/) diff --git a/docs/api/_src/pipeline_lint_tests/plugin_includes.md b/docs/api/_src/pipeline_lint_tests/plugin_includes.md new file mode 100644 index 0000000000..48bddadc80 --- /dev/null +++ b/docs/api/_src/pipeline_lint_tests/plugin_includes.md @@ -0,0 +1,5 @@ +# plugin_includes + +```{eval-rst} +.. automethod:: nf_core.pipelines.lint.PipelineLint.plugin_includes +``` diff --git a/docs/api/requirements.txt b/docs/api/requirements.txt index abffe30740..1d23f0b27d 100644 --- a/docs/api/requirements.txt +++ b/docs/api/requirements.txt @@ -1,3 +1,4 @@ +autodoc_pydantic Sphinx>=3.3.1 sphinxcontrib-napoleon sphinx-markdown-builder diff --git a/nf_core/__main__.py b/nf_core/__main__.py index 0efea13ec9..473ba96a5f 100644 --- a/nf_core/__main__.py +++ b/nf_core/__main__.py @@ -4,6 +4,7 @@ import logging import os import sys +from pathlib import Path import rich import rich.console @@ -35,6 +36,7 @@ pipelines_launch, pipelines_lint, pipelines_list, + pipelines_rocrate, pipelines_schema_build, pipelines_schema_docs, pipelines_schema_lint, @@ -85,7 +87,7 @@ }, { "name": "For developers", - "commands": ["create", "lint", "bump-version", "sync", "schema", "create-logo"], + "commands": ["create", "lint", "bump-version", "sync", "schema", "rocrate", "create-logo"], }, ], "nf-core modules": [ @@ -366,26 +368,18 @@ def command_pipelines_lint( help="Archive compression type", ) @click.option("-f", "--force", is_flag=True, default=False, help="Overwrite existing files") -# TODO: Remove this in a future release. Deprecated in March 2024. -@click.option( - "-t", - "--tower", - is_flag=True, - default=False, - hidden=True, - help="Download for Seqera Platform. 
DEPRECATED: Please use `--platform` instead.", ) @click.option( + "-p", "--platform", is_flag=True, default=False, help="Download for Seqera Platform (formerly Nextflow Tower)", ) @click.option( - "-d", + "-c", "--download-configuration", - is_flag=True, - default=False, + type=click.Choice(["yes", "no"]), + default="no", help="Include configuration profiles in download. Not available with `--platform`", ) @click.option( @@ -420,7 +414,7 @@ def command_pipelines_lint( help="List of images already available in a remote `singularity.cacheDir`.", ) @click.option( - "-p", + "-d", "--parallel-downloads", type=int, default=4, @@ -434,7 +428,6 @@ def command_pipelines_download( outdir, compress, force, - tower, platform, download_configuration, tag, @@ -454,7 +447,6 @@ outdir, compress, force, - tower, platform, download_configuration, tag, @@ -579,6 +571,44 @@ def command_pipelines_list(ctx, keywords, sort, json, show_archived): pipelines_list(ctx, keywords, sort, json, show_archived) + +# nf-core pipelines rocrate +@pipelines.command("rocrate") +@click.argument( + "pipeline_dir", + type=click.Path(exists=True), + default=Path.cwd(), + required=True, + metavar="", +) +@click.option( + "-j", + "--json_path", + default=Path.cwd(), + type=str, + help="Path to save RO Crate metadata json file to", +) +@click.option("-z", "--zip_path", type=str, help="Path to save RO Crate zip file to") +@click.option( + "-pv", + "--pipeline_version", + type=str, + help="Version of pipeline to use for RO Crate", + default="", +) +@click.pass_context +def rocrate( + ctx, + pipeline_dir: str, + json_path: str, + zip_path: str, + pipeline_version: str, +): + """ + Make a Research Object Crate + """ + pipelines_rocrate(ctx, pipeline_dir, json_path, zip_path, pipeline_version) + + # nf-core pipelines sync @pipelines.command("sync") @click.pass_context @@ -1231,11 +1261,14 @@ def command_modules_test(ctx, tool, directory, no_prompts, update, once, profile is_flag=True, help="Fix the module version if a newer version is available", ) -def command_modules_lint(ctx, tool, directory, registry, key, all, fail_warned, local, passed, sort_by, fix_version): +@click.option("--fix", is_flag=True, help="Fix all linting tests if possible.") +def command_modules_lint( + ctx, tool, directory, registry, key, all, fail_warned, local, passed, sort_by, fix_version, fix +): """ Lint one or more modules in a directory. """ - modules_lint(ctx, tool, directory, registry, key, all, fail_warned, local, passed, sort_by, fix_version) + modules_lint(ctx, tool, directory, registry, key, all, fail_warned, local, passed, sort_by, fix_version, fix) # nf-core modules info @@ -1476,11 +1509,14 @@ def command_subworkflows_list_local(ctx, keywords, json, directory): # pylint: help="Sort lint output by subworkflow or test name.", show_default=True, ) -def command_subworkflows_lint(ctx, subworkflow, directory, registry, key, all, fail_warned, local, passed, sort_by): +@click.option("--fix", is_flag=True, help="Fix all linting tests if possible.") +def command_subworkflows_lint( + ctx, subworkflow, directory, registry, key, all, fail_warned, local, passed, sort_by, fix +): """ Lint one or more subworkflows in a directory. 
""" - subworkflows_lint(ctx, subworkflow, directory, registry, key, all, fail_warned, local, passed, sort_by) + subworkflows_lint(ctx, subworkflow, directory, registry, key, all, fail_warned, local, passed, sort_by, fix) # nf-core subworkflows info @@ -1696,7 +1732,7 @@ def command_schema_validate(pipeline, params): @click.option( "--url", type=str, - default="https://nf-co.re/pipeline_schema_builder", + default="https://oldsite.nf-co.re/pipeline_schema_builder", help="Customise the builder URL (for development work)", ) def command_schema_build(directory, no_prompts, web_only, url): @@ -1819,6 +1855,7 @@ def command_create_logo(logo_text, directory, name, theme, width, format, force) # nf-core sync (deprecated) @nf_core_cli.command("sync", hidden=True, deprecated=True) +@click.pass_context @click.option( "-d", "--dir", @@ -1849,14 +1886,14 @@ def command_create_logo(logo_text, directory, name, theme, width, format, force) @click.option("-g", "--github-repository", type=str, help="GitHub PR: target repository.") @click.option("-u", "--username", type=str, help="GitHub PR: auth username.") @click.option("-t", "--template-yaml", help="Pass a YAML file to customize the template") -def command_sync(directory, from_branch, pull_request, github_repository, username, template_yaml, force_pr): +def command_sync(ctx, directory, from_branch, pull_request, github_repository, username, template_yaml, force_pr): """ Use `nf-core pipelines sync` instead. """ log.warning( "The `[magenta]nf-core sync[/]` command is deprecated. Use `[magenta]nf-core pipelines sync[/]` instead." ) - pipelines_sync(directory, from_branch, pull_request, github_repository, username, template_yaml, force_pr) + pipelines_sync(ctx, directory, from_branch, pull_request, github_repository, username, template_yaml, force_pr) # nf-core bump-version (deprecated) @@ -2114,8 +2151,7 @@ def command_download( outdir, compress, force, - tower, - platform, + platform or tower, download_configuration, tag, container_system, diff --git a/nf_core/commands_modules.py b/nf_core/commands_modules.py index 57c8e9777c..33b1f75160 100644 --- a/nf_core/commands_modules.py +++ b/nf_core/commands_modules.py @@ -261,7 +261,7 @@ def modules_test(ctx, tool, directory, no_prompts, update, once, profile, migrat sys.exit(1) -def modules_lint(ctx, tool, directory, registry, key, all, fail_warned, local, passed, sort_by, fix_version): +def modules_lint(ctx, tool, directory, registry, key, all, fail_warned, local, passed, sort_by, fix_version, fix): """ Lint one or more modules in a directory. 
@@ -278,6 +278,7 @@ def modules_lint(ctx, tool, directory, registry, key, all, fail_warned, local, p module_lint = ModuleLint( directory, fail_warned=fail_warned, + fix=fix, registry=ctx.params["registry"], remote_url=ctx.obj["modules_repo_url"], branch=ctx.obj["modules_repo_branch"], diff --git a/nf_core/commands_pipelines.py b/nf_core/commands_pipelines.py index 4b6fa75f3e..9699dc53a3 100644 --- a/nf_core/commands_pipelines.py +++ b/nf_core/commands_pipelines.py @@ -2,6 +2,7 @@ import os import sys from pathlib import Path +from typing import Optional, Union import rich @@ -144,9 +145,12 @@ def pipelines_lint( ctx.obj["hide_progress"], ) swf_failed = 0 + module_failed = 0 if subworkflow_lint_obj is not None: swf_failed = len(subworkflow_lint_obj.failed) - if len(lint_obj.failed) + len(module_lint_obj.failed) + swf_failed > 0: + if module_lint_obj is not None: + module_failed = len(module_lint_obj.failed) + if len(lint_obj.failed) + module_failed + swf_failed > 0: sys.exit(1) except AssertionError as e: log.critical(e) @@ -164,7 +168,6 @@ def pipelines_download( outdir, compress, force, - tower, platform, download_configuration, tag, @@ -182,16 +185,13 @@ def pipelines_download( """ from nf_core.pipelines.download import DownloadWorkflow - if tower: - log.warning("[red]The `-t` / `--tower` flag is deprecated. Please use `--platform` instead.[/]") - dl = DownloadWorkflow( pipeline, revision, outdir, compress, force, - tower or platform, # True if either specified + platform, download_configuration, tag, container_system, @@ -278,6 +278,33 @@ def pipelines_list(ctx, keywords, sort, json, show_archived): stdout.print(list_workflows(keywords, sort, json, show_archived)) +# nf-core pipelines rocrate +def pipelines_rocrate( + ctx, + pipeline_dir: Union[str, Path], + json_path: Optional[Union[str, Path]], + zip_path: Optional[Union[str, Path]], + pipeline_version: str, +) -> None: + from nf_core.pipelines.rocrate import ROCrate + + if json_path is None and zip_path is None: + log.error("Either `--json_path` or `--zip_path` must be specified.") + sys.exit(1) + else: + pipeline_dir = Path(pipeline_dir) + if json_path is not None: + json_path = Path(json_path) + if zip_path is not None: + zip_path = Path(zip_path) + try: + rocrate_obj = ROCrate(pipeline_dir, pipeline_version) + rocrate_obj.create_rocrate(pipeline_dir, json_path=json_path, zip_path=zip_path) + except (UserWarning, LookupError, FileNotFoundError) as e: + log.error(e) + sys.exit(1) + + # nf-core pipelines sync def pipelines_sync(ctx, directory, from_branch, pull_request, github_repository, username, template_yaml, force_pr): """ diff --git a/nf_core/commands_subworkflows.py b/nf_core/commands_subworkflows.py index a32f8d5c3e..8e90a8116b 100644 --- a/nf_core/commands_subworkflows.py +++ b/nf_core/commands_subworkflows.py @@ -104,7 +104,7 @@ def subworkflows_list_local(ctx, keywords, json, directory): # pylint: disable= sys.exit(1) -def subworkflows_lint(ctx, subworkflow, directory, registry, key, all, fail_warned, local, passed, sort_by): +def subworkflows_lint(ctx, subworkflow, directory, registry, key, all, fail_warned, local, passed, sort_by, fix): """ Lint one or more subworkflows in a directory. 
@@ -121,6 +121,7 @@ def subworkflows_lint(ctx, subworkflow, directory, registry, key, all, fail_warn subworkflow_lint = SubworkflowLint( directory, fail_warned=fail_warned, + fix=fix, registry=ctx.params["registry"], remote_url=ctx.obj["modules_repo_url"], branch=ctx.obj["modules_repo_branch"], diff --git a/nf_core/components/components_utils.py b/nf_core/components/components_utils.py index c37de84f6d..67e05e0ce6 100644 --- a/nf_core/components/components_utils.py +++ b/nf_core/components/components_utils.py @@ -4,6 +4,7 @@ from typing import TYPE_CHECKING, List, Optional, Tuple, Union import questionary +import requests import rich.prompt if TYPE_CHECKING: @@ -162,3 +163,29 @@ def get_components_to_install(subworkflow_dir: Union[str, Path]) -> Tuple[List[s elif link.startswith("../"): subworkflows.append(name.lower()) return modules, subworkflows + + +def get_biotools_id(tool_name) -> str: + """ + Try to find a bio.tools ID for 'tool' + """ + url = f"https://bio.tools/api/t/?q={tool_name}&format=json" + try: + # Send a GET request to the API + response = requests.get(url) + response.raise_for_status() # Raise an error for bad status codes + # Parse the JSON response + data = response.json() + + # Iterate through the tools in the response to find the tool name + for tool in data["list"]: + if tool["name"].lower() == tool_name: + return tool["biotoolsCURIE"] + + # If the tool name was not found in the response + log.warning(f"Could not find a bio.tools ID for '{tool_name}'") + return "" + + except requests.exceptions.RequestException as e: + log.warning(f"Could not find a bio.tools ID for '{tool_name}': {e}") + return "" diff --git a/nf_core/components/create.py b/nf_core/components/create.py index c71b128415..c781905618 100644 --- a/nf_core/components/create.py +++ b/nf_core/components/create.py @@ -21,6 +21,7 @@ import nf_core import nf_core.utils from nf_core.components.components_command import ComponentCommand +from nf_core.components.components_utils import get_biotools_id from nf_core.pipelines.lint_utils import run_prettier_on_file log = logging.getLogger(__name__) @@ -61,6 +62,7 @@ def __init__( self.file_paths: Dict[str, Path] = {} self.not_empty_template = not empty_template self.migrate_pytest = migrate_pytest + self.tool_identifier = "" def create(self) -> bool: """ @@ -149,6 +151,8 @@ def create(self) -> bool: if self.component_type == "modules": # Try to find a bioconda package for 'component' self._get_bioconda_tool() + # Try to find a biotools entry for 'component' + self.tool_identifier = get_biotools_id(self.component) # Prompt for GitHub username self._get_username() @@ -244,7 +248,7 @@ def _get_module_structure_components(self): if self.process_label is None: log.info( "Provide an appropriate resource label for the process, taken from the " - "[link=https://github.com/nf-core/tools/blob/master/nf_core/pipeline-template/conf/base.config#L29]nf-core pipeline template[/link].\n" + "[link=https://github.com/nf-core/tools/blob/main/nf_core/pipeline-template/conf/base.config#L29]nf-core pipeline template[/link].\n" "For example: {}".format(", ".join(process_label_defaults)) ) while self.process_label is None: diff --git a/nf_core/components/info.py b/nf_core/components/info.py index 98f8be5272..31769785a1 100644 --- a/nf_core/components/info.py +++ b/nf_core/components/info.py @@ -211,9 +211,9 @@ def get_local_yaml(self) -> Optional[Dict]: return yaml.safe_load(fh) log.debug(f"{self.component_type[:-1].title()} '{self.component}' meta.yml not found locally") - return None + 
return {} - def get_remote_yaml(self) -> Optional[dict]: + def get_remote_yaml(self) -> Optional[Dict]: """Attempt to get the meta.yml file from a remote repo. Returns: @@ -229,6 +229,25 @@ def get_remote_yaml(self) -> Optional[dict]: self.remote_location = self.modules_repo.remote_url return yaml.safe_load(file_contents) + def generate_params_table(self, type) -> Table: + "Generate a rich table for inputs and outputs" + table = Table(expand=True, show_lines=True, box=box.MINIMAL_HEAVY_HEAD, padding=0) + table.add_column(f":inbox_tray: {type}") + table.add_column("Description") + if self.component_type == "modules": + table.add_column("Pattern", justify="right", style="green") + elif self.component_type == "subworkflows": + table.add_column("Structure", justify="right", style="green") + return table + + def get_channel_structure(self, structure: dict) -> str: + "Get the structure of a channel" + structure_str = "" + for key, info in structure.items(): + pattern = f" - {info['pattern']}" if info.get("pattern") else "" + structure_str += f"{key} ({info['type']}{pattern})" + return structure_str + def generate_component_info_help(self): """Take the parsed meta.yml and generate rich help. @@ -277,33 +296,48 @@ def generate_component_info_help(self): # Inputs if self.meta.get("input"): - inputs_table = Table(expand=True, show_lines=True, box=box.MINIMAL_HEAVY_HEAD, padding=0) - inputs_table.add_column(":inbox_tray: Inputs") - inputs_table.add_column("Description") - inputs_table.add_column("Pattern", justify="right", style="green") - for input in self.meta["input"]: - for key, info in input.items(): - inputs_table.add_row( - f"[orange1 on black] {key} [/][dim i] ({info['type']})", - Markdown(info["description"] if info["description"] else ""), - info.get("pattern", ""), - ) + inputs_table = self.generate_params_table("Inputs") + for i, input in enumerate(self.meta["input"]): + inputs_table.add_row(f"[italic]input[{i}][/]", "", "") + if self.component_type == "modules": + for element in input: + for key, info in element.items(): + inputs_table.add_row( + f"[orange1 on black] {key} [/][dim i] ({info['type']})", + Markdown(info["description"] if info["description"] else ""), + info.get("pattern", ""), + ) + elif self.component_type == "subworkflows": + for key, info in input.items(): + inputs_table.add_row( + f"[orange1 on black] {key} [/][dim i]", + Markdown(info["description"] if info["description"] else ""), + self.get_channel_structure(info["structure"]) if info.get("structure") else "", + ) renderables.append(inputs_table) # Outputs if self.meta.get("output"): - outputs_table = Table(expand=True, show_lines=True, box=box.MINIMAL_HEAVY_HEAD, padding=0) - outputs_table.add_column(":outbox_tray: Outputs") - outputs_table.add_column("Description") - outputs_table.add_column("Pattern", justify="right", style="green") + outputs_table = self.generate_params_table("Outputs") for output in self.meta["output"]: - for key, info in output.items(): - outputs_table.add_row( - f"[orange1 on black] {key} [/][dim i] ({info['type']})", - Markdown(info["description"] if info["description"] else ""), - info.get("pattern", ""), - ) + if self.component_type == "modules": + for ch_name, elements in output.items(): + outputs_table.add_row(f"{ch_name}", "", "") + for element in elements: + for key, info in element.items(): + outputs_table.add_row( + f"[orange1 on black] {key} [/][dim i] ({info['type']})", + Markdown(info["description"] if info["description"] else ""), + info.get("pattern", ""), + ) + elif 
self.component_type == "subworkflows": + for key, info in output.items(): + outputs_table.add_row( + f"[orange1 on black] {key} [/][dim i]", + Markdown(info["description"] if info["description"] else ""), + self.get_channel_structure(info["structure"]) if info.get("structure") else "", + ) renderables.append(outputs_table) diff --git a/nf_core/components/lint/__init__.py b/nf_core/components/lint/__init__.py index c1b1f24cb7..fcc3b414d8 100644 --- a/nf_core/components/lint/__init__.py +++ b/nf_core/components/lint/__init__.py @@ -57,6 +57,7 @@ def __init__( component_type: str, directory: Union[str, Path], fail_warned: bool = False, + fix: bool = False, remote_url: Optional[str] = None, branch: Optional[str] = None, no_pull: bool = False, @@ -73,6 +74,7 @@ def __init__( ) self.fail_warned = fail_warned + self.fix = fix self.passed: List[LintResult] = [] self.warned: List[LintResult] = [] self.failed: List[LintResult] = [] diff --git a/nf_core/components/list.py b/nf_core/components/list.py index 05a8f71120..4c20e60864 100644 --- a/nf_core/components/list.py +++ b/nf_core/components/list.py @@ -25,7 +25,7 @@ def __init__( self.remote = remote super().__init__(component_type, pipeline_dir, remote_url, branch, no_pull) - def _configure_repo_and_paths(self, nf_dir_req=True) -> None: + def _configure_repo_and_paths(self, nf_dir_req: bool = True) -> None: """ Override the default with nf_dir_req set to False to allow info to be run from anywhere and still return remote info diff --git a/nf_core/components/nfcore_component.py b/nf_core/components/nfcore_component.py index db3196be91..37e43a536e 100644 --- a/nf_core/components/nfcore_component.py +++ b/nf_core/components/nfcore_component.py @@ -5,7 +5,7 @@ import logging import re from pathlib import Path -from typing import List, Optional, Tuple, Union +from typing import Any, Dict, List, Optional, Tuple, Union log = logging.getLogger(__name__) @@ -41,6 +41,7 @@ def __init__( remote_component (bool): Whether the module is to be treated as a nf-core or local component """ + self.component_type = component_type self.component_name = component_name self.repo_url = repo_url self.component_dir = component_dir @@ -49,7 +50,7 @@ def __init__( self.passed: List[Tuple[str, str, Path]] = [] self.warned: List[Tuple[str, str, Path]] = [] self.failed: List[Tuple[str, str, Path]] = [] - self.inputs: List[str] = [] + self.inputs: List[List[Dict[str, Dict[str, str]]]] = [] self.outputs: List[str] = [] self.has_meta: bool = False self.git_sha: Optional[str] = None @@ -64,7 +65,15 @@ def __init__( self.process_name = "" self.environment_yml: Optional[Path] = Path(self.component_dir, "environment.yml") - repo_dir = self.component_dir.parts[: self.component_dir.parts.index(self.component_name.split("/")[0])][-1] + component_list = self.component_name.split("/") + + name_index = len(self.component_dir.parts) - 1 - self.component_dir.parts[::-1].index(component_list[0]) + if len(component_list) != 1 and component_list[0] == component_list[1]: + # Handle cases where the subtool has the same name as the tool + name_index -= 1 + + repo_dir = self.component_dir.parts[:name_index][-1] + self.org = repo_dir self.nftest_testdir = Path(self.component_dir, "tests") self.nftest_main_nf = Path(self.nftest_testdir, "main.nf.test") @@ -162,45 +171,95 @@ def _get_included_components_in_chained_tests(self, main_nf_test: Union[Path, st def get_inputs_from_main_nf(self) -> None: """Collect all inputs from the main.nf file.""" - inputs: List[str] = [] + inputs: Any = [] # Can be 
'list[list[dict[str, dict[str, str]]]]' or 'list[str]' with open(self.main_nf) as f: data = f.read() - # get input values from main.nf after "input:", which can be formatted as tuple val(foo) path(bar) or val foo or val bar or path bar or path foo - # regex matches: - # val(foo) - # path(bar) - # val foo - # val bar - # path bar - # path foo - # don't match anything inside comments or after "output:" - if "input:" not in data: - log.debug(f"Could not find any inputs in {self.main_nf}") - input_data = data.split("input:")[1].split("output:")[0] - regex = r"(val|path)\s*(\(([^)]+)\)|\s*([^)\s,]+))" - matches = re.finditer(regex, input_data, re.MULTILINE) - for _, match in enumerate(matches, start=1): - if match.group(3): - input_val = match.group(3).split(",")[0] # handle `files, stageAs: "inputs/*"` cases - inputs.append(input_val) - elif match.group(4): - input_val = match.group(4).split(",")[0] # handle `files, stageAs: "inputs/*"` cases - inputs.append(input_val) - log.debug(f"Found {len(inputs)} inputs in {self.main_nf}") - self.inputs = inputs + if self.component_type == "modules": + # get input values from main.nf after "input:", which can be formatted as tuple val(foo) path(bar) or val foo or val bar or path bar or path foo + # regex matches: + # val(foo) + # path(bar) + # val foo + # val bar + # path bar + # path foo + # don't match anything inside comments or after "output:" + if "input:" not in data: + log.debug(f"Could not find any inputs in {self.main_nf}") + return + input_data = data.split("input:")[1].split("output:")[0] + for line in input_data.split("\n"): + channel_elements: Any = [] + regex = r"(val|path)\s*(\(([^)]+)\)|\s*([^)\s,]+))" + matches = re.finditer(regex, line) + for _, match in enumerate(matches, start=1): + input_val = None + if match.group(3): + input_val = match.group(3).split(",")[0] # handle `files, stageAs: "inputs/*"` cases + elif match.group(4): + input_val = match.group(4).split(",")[0] # handle `files, stageAs: "inputs/*"` cases + if input_val: + channel_elements.append({input_val: {}}) + if len(channel_elements) > 0: + inputs.append(channel_elements) + log.debug(f"Found {len(inputs)} inputs in {self.main_nf}") + self.inputs = inputs + elif self.component_type == "subworkflows": + # get input values from main.nf after "take:" + if "take:" not in data: + log.debug(f"Could not find any inputs in {self.main_nf}") + return + # get all lines between "take" and "main" or "emit" + input_data = data.split("take:")[1].split("main:")[0].split("emit:")[0] + for line in input_data.split("\n"): + try: + inputs.append(line.split()[0]) + except IndexError: + pass # Empty lines + log.debug(f"Found {len(inputs)} inputs in {self.main_nf}") + self.inputs = inputs def get_outputs_from_main_nf(self): outputs = [] with open(self.main_nf) as f: data = f.read() - # get output values from main.nf after "output:". the names are always after "emit:" - if "output:" not in data: - log.debug(f"Could not find any outputs in {self.main_nf}") - return outputs - output_data = data.split("output:")[1].split("when:")[0] - regex = r"emit:\s*([^)\s,]+)" - matches = re.finditer(regex, output_data, re.MULTILINE) - for _, match in enumerate(matches, start=1): - outputs.append(match.group(1)) - log.debug(f"Found {len(outputs)} outputs in {self.main_nf}") - self.outputs = outputs + if self.component_type == "modules": + # get output values from main.nf after "output:". 
the names are always after "emit:" + if "output:" not in data: + log.debug(f"Could not find any outputs in {self.main_nf}") + return outputs + output_data = data.split("output:")[1].split("when:")[0] + regex_emit = r"emit:\s*([^)\s,]+)" + regex_elements = r"(val|path|env|stdout)\s*(\(([^)]+)\)|\s*([^)\s,]+))" + for line in output_data.split("\n"): + match_emit = re.search(regex_emit, line) + matches_elements = re.finditer(regex_elements, line) + if not match_emit: + continue + output_channel = {match_emit.group(1): []} + for _, match_element in enumerate(matches_elements, start=1): + output_val = None + if match_element.group(3): + output_val = match_element.group(3) + elif match_element.group(4): + output_val = match_element.group(4) + if output_val: + output_val = output_val.strip("'").strip('"') # remove quotes + output_channel[match_emit.group(1)].append({output_val: {}}) + outputs.append(output_channel) + log.debug(f"Found {len(outputs)} outputs in {self.main_nf}") + self.outputs = outputs + elif self.component_type == "subworkflows": + # get output values from main.nf after "emit:". Can be named outputs or not. + if "emit:" not in data: + log.debug(f"Could not find any outputs in {self.main_nf}") + return outputs + output_data = data.split("emit:")[1].split("}")[0] + for line in output_data.split("\n"): + try: + outputs.append(line.split("=")[0].split()[0]) + except IndexError: + # Empty lines + pass + log.debug(f"Found {len(outputs)} outputs in {self.main_nf}") + self.outputs = outputs diff --git a/nf_core/gitpod/gitpod.Dockerfile b/nf_core/gitpod/gitpod.Dockerfile index 849729966b..2a9fbb0ed6 100644 --- a/nf_core/gitpod/gitpod.Dockerfile +++ b/nf_core/gitpod/gitpod.Dockerfile @@ -2,7 +2,7 @@ # docker build -t gitpod:test -f nf_core/gitpod/gitpod.Dockerfile . # See https://docs.renovatebot.com/docker/#digest-pinning for why a digest is used. -FROM gitpod/workspace-base@sha256:f189a4195c3861365356f9c1b438ab26fd88e1ff46ce2843afc62861fc982e0c +FROM gitpod/workspace-base@sha256:2cc134fe5bd7d8fdbe44cab294925d4bc6d2d178d94624f4c376584a22d1f7b6 USER root @@ -45,7 +45,7 @@ USER gitpod RUN conda config --add channels bioconda && \ conda config --add channels conda-forge && \ conda config --set channel_priority strict && \ - conda install --quiet --yes --name base \ + conda install --quiet --yes --update-all --name base \ nextflow \ nf-test \ prettier \ diff --git a/nf_core/module-template/environment.yml b/nf_core/module-template/environment.yml index f234f85422..a8a40a8e03 100644 --- a/nf_core/module-template/environment.yml +++ b/nf_core/module-template/environment.yml @@ -1,6 +1,5 @@ --- # yaml-language-server: $schema=https://raw.githubusercontent.com/nf-core/modules/master/modules/environment-schema.json -name: "{{ component_name_underscore }}" channels: - conda-forge - bioconda diff --git a/nf_core/module-template/meta.yml b/nf_core/module-template/meta.yml index 9d3f3c1c12..d9d1cc8ae8 100644 --- a/nf_core/module-template/meta.yml +++ b/nf_core/module-template/meta.yml @@ -20,48 +20,67 @@ tools: tool_dev_url: "{{ tool_dev_url }}" doi: "" licence: {{ tool_licence }} + identifier: {{ tool_identifier }} {% if not_empty_template -%} ## TODO nf-core: Add a description of all of the variables used as input {% endif -%} input: #{% if has_meta %} Only when we have meta - - meta: - type: map - description: | - Groovy Map containing sample information - e.g. `[ id:'sample1', single_end:false ]` + - - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
`[ id:'sample1', single_end:false ]` {% endif %} {% if not_empty_template -%} ## TODO nf-core: Delete / customise this example input {%- endif %} - - {{ 'bam:' if not_empty_template else "input:" }} - type: file - description: {{ 'Sorted BAM/CRAM/SAM file' if not_empty_template else "" }} - pattern: {{ '"*.{bam,cram,sam}"' if not_empty_template else "" }} + - {{ 'bam:' if not_empty_template else "input:" }} + type: file + description: {{ 'Sorted BAM/CRAM/SAM file' if not_empty_template else "" }} + pattern: {{ '"*.{bam,cram,sam}"' if not_empty_template else "" }} + ontologies: + {% if not_empty_template -%} + - edam: "http://edamontology.org/format_25722" + - edam: "http://edamontology.org/format_2573" + - edam: "http://edamontology.org/format_3462" + {% else %} + - edam: "" + {%- endif %} {% if not_empty_template -%} ## TODO nf-core: Add a description of all of the variables used as output {% endif -%} output: + - {{ 'bam:' if not_empty_template else "output:" }} #{% if has_meta -%} Only when we have meta - - meta: - type: map - description: | - Groovy Map containing sample information - e.g. `[ id:'sample1', single_end:false ]` - {% endif %} - - versions: - type: file - description: File containing software versions - pattern: "versions.yml" + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. `[ id:'sample1', single_end:false ]` + {%- endif %} {% if not_empty_template -%} - ## TODO nf-core: Delete / customise this example output + ## TODO nf-core: Delete / customise this example output {%- endif %} - - {{ 'bam:' if not_empty_template else "output:" }} - type: file - description: {{ 'Sorted BAM/CRAM/SAM file' if not_empty_template else "" }} - pattern: {{ '"*.{bam,cram,sam}"' if not_empty_template else "" }} + - {{ '"*.bam":' if not_empty_template else '"*":' }} + type: file + description: {{ 'Sorted BAM/CRAM/SAM file' if not_empty_template else "" }} + pattern: {{ '"*.{bam,cram,sam}"' if not_empty_template else "" }} + ontologies: + {% if not_empty_template -%} + - edam: "http://edamontology.org/format_25722" + - edam: "http://edamontology.org/format_2573" + - edam: "http://edamontology.org/format_3462" + {% else -%} + - edam: "" + {%- endif %} + - versions: + - "versions.yml": + type: file + description: File containing software versions + pattern: "versions.yml" authors: - "{{ author }}" diff --git a/nf_core/modules/lint/__init__.py b/nf_core/modules/lint/__init__.py index 017b3965b4..49012cff40 100644 --- a/nf_core/modules/lint/__init__.py +++ b/nf_core/modules/lint/__init__.py @@ -14,20 +14,22 @@ import questionary import rich import rich.progress +import ruamel.yaml import nf_core.components import nf_core.components.nfcore_component import nf_core.modules.modules_utils import nf_core.utils +from nf_core.components.components_utils import get_biotools_id from nf_core.components.lint import ComponentLint, LintExceptionError, LintResult from nf_core.components.nfcore_component import NFCoreComponent -from nf_core.pipelines.lint_utils import console +from nf_core.pipelines.lint_utils import console, run_prettier_on_file log = logging.getLogger(__name__) from .environment_yml import environment_yml from .main_nf import main_nf -from .meta_yml import meta_yml +from .meta_yml import meta_yml, obtain_correct_and_specified_inputs, obtain_correct_and_specified_outputs, read_meta_yml from .module_changes import module_changes from .module_deprecations import module_deprecations from .module_patch import module_patch @@ -46,6 +48,9 @@ class 
ModuleLint(ComponentLint): environment_yml = environment_yml main_nf = main_nf meta_yml = meta_yml + obtain_correct_and_specified_inputs = obtain_correct_and_specified_inputs + obtain_correct_and_specified_outputs = obtain_correct_and_specified_outputs + read_meta_yml = read_meta_yml module_changes = module_changes module_deprecations = module_deprecations module_patch = module_patch @@ -57,6 +62,7 @@ def __init__( self, directory: Union[str, Path], fail_warned: bool = False, + fix: bool = False, remote_url: Optional[str] = None, branch: Optional[str] = None, no_pull: bool = False, @@ -67,6 +73,7 @@ def __init__( component_type="modules", directory=directory, fail_warned=fail_warned, + fix=fix, remote_url=remote_url, branch=branch, no_pull=no_pull, @@ -237,6 +244,12 @@ def lint_module( # Otherwise run all the lint tests else: + mod.get_inputs_from_main_nf() + mod.get_outputs_from_main_nf() + # Update meta.yml file if requested + if self.fix: + self.update_meta_yml_file(mod) + if self.repo_type == "pipeline" and self.modules_json and mod.repo_url: # Set correct sha version = self.modules_json.get_module_version(mod.component_name, mod.repo_url, mod.org) @@ -256,3 +269,104 @@ def lint_module( self.failed += warned self.failed += [LintResult(mod, *m) for m in mod.failed] + + def update_meta_yml_file(self, mod): + """ + Update the meta.yml file with the correct inputs and outputs + """ + meta_yml = self.read_meta_yml(mod) + corrected_meta_yml = meta_yml.copy() + yaml = ruamel.yaml.YAML() + yaml.preserve_quotes = True + yaml.indent(mapping=2, sequence=2, offset=0) + + # Obtain inputs and outputs from main.nf and meta.yml + # Used to compare only the structure of channels and elements + # Do not compare features to allow for custom features in meta.yml (i.e. 
pattern)
+        if "input" in meta_yml:
+            correct_inputs, meta_inputs = self.obtain_correct_and_specified_inputs(mod, meta_yml)
+        if "output" in meta_yml:
+            correct_outputs, meta_outputs = self.obtain_correct_and_specified_outputs(mod, meta_yml)
+
+        if "input" in meta_yml and correct_inputs != meta_inputs:
+            log.debug(
+                f"Correct inputs: '{correct_inputs}' differ from current inputs: '{meta_inputs}' in '{mod.meta_yml}'"
+            )
+            corrected_meta_yml["input"] = mod.inputs.copy()  # list of lists (channels) of dicts (elements)
+            for i, channel in enumerate(corrected_meta_yml["input"]):
+                for j, element in enumerate(channel):
+                    element_name = list(element.keys())[0]
+                    for k, meta_element in enumerate(meta_yml["input"]):
+                        try:
+                            # Handle old format of meta.yml: list of dicts (channels)
+                            if element_name in meta_element.keys():
+                                # Copy current features of that input element from meta.yml
+                                for feature in meta_element[element_name].keys():
+                                    if feature not in element[element_name].keys():
+                                        corrected_meta_yml["input"][i][j][element_name][feature] = meta_element[
+                                            element_name
+                                        ][feature]
+                                break
+                        except AttributeError:
+                            # Handle new format of meta.yml: list of lists (channels) of elements (dicts)
+                            for x, meta_ch_element in enumerate(meta_element):
+                                if element_name in meta_ch_element.keys():
+                                    # Copy current features of that input element from meta.yml
+                                    for feature in meta_element[x][element_name].keys():
+                                        if feature not in element[element_name].keys():
+                                            corrected_meta_yml["input"][i][j][element_name][feature] = meta_element[x][
+                                                element_name
+                                            ][feature]
+                                    break
+
+        if "output" in meta_yml and correct_outputs != meta_outputs:
+            log.debug(
+                f"Correct outputs: '{correct_outputs}' differ from current outputs: '{meta_outputs}' in '{mod.meta_yml}'"
+            )
+            corrected_meta_yml["output"] = mod.outputs.copy()  # list of dicts (channels) with list of dicts (elements)
+            for i, channel in enumerate(corrected_meta_yml["output"]):
+                ch_name = list(channel.keys())[0]
+                for j, element in enumerate(channel[ch_name]):
+                    element_name = list(element.keys())[0]
+                    for k, meta_element in enumerate(meta_yml["output"]):
+                        if element_name in meta_element.keys():
+                            # Copy current features of that output element from meta.yml
+                            for feature in meta_element[element_name].keys():
+                                if feature not in element[element_name].keys():
+                                    corrected_meta_yml["output"][i][ch_name][j][element_name][feature] = meta_element[
+                                        element_name
+                                    ][feature]
+                            break
+                        elif ch_name in meta_element.keys():
+                            # When the previous output element was using the name of the channel
+                            # Copy current features of that output element from meta.yml
+                            try:
+                                # Handle old format of meta.yml
+                                for feature in meta_element[ch_name].keys():
+                                    if feature not in element[element_name].keys():
+                                        corrected_meta_yml["output"][i][ch_name][j][element_name][feature] = (
+                                            meta_element[ch_name][feature]
+                                        )
+                            except AttributeError:
+                                # Handle new format of meta.yml
+                                for x, meta_ch_element in enumerate(meta_element[ch_name]):
+                                    for meta_ch_element_name in meta_ch_element.keys():
+                                        for feature in meta_ch_element[meta_ch_element_name].keys():
+                                            if feature not in element[element_name].keys():
+                                                corrected_meta_yml["output"][i][ch_name][j][element_name][feature] = (
+                                                    meta_ch_element[meta_ch_element_name][feature]
+                                                )
+                            break
+
+        # Add bio.tools identifier
+        for i, tool in enumerate(corrected_meta_yml["tools"]):
+            tool_name = list(tool.keys())[0]
+            if "identifier" not in tool[tool_name]:
+                corrected_meta_yml["tools"][i][tool_name]["identifier"] = get_biotools_id(
+                    mod.component_name if "/" not in mod.component_name else mod.component_name.split("/")[0]
+                )
+
+        with open(mod.meta_yml, "w") as fh:
+            log.info(f"Updating {mod.meta_yml}")
+            yaml.dump(corrected_meta_yml, fh)
+            run_prettier_on_file(fh.name)
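For orientation, a minimal sketch of the data structures this new code moves around (illustration only, not part of the diff; the module and channel names are hypothetical):

```python
# Illustration only: a hypothetical module with `tuple val(meta), path(bam)`
# as its single input channel and `tuple val(meta), path("*.bai"), emit: bai`
# as its single output channel.

# Parsed from main.nf by get_inputs_from_main_nf() / get_outputs_from_main_nf():
mod_inputs = [[{"meta": {}}, {"bam": {}}]]              # channels -> elements -> features
mod_outputs = [{"bai": [{"meta": {}}, {"*.bai": {}}]}]  # keyed by emit name

# obtain_correct_and_specified_inputs() keeps names only, so the comparison
# ignores features such as `type`, `description` or `pattern`:
correct_inputs = [["meta", "bam"]]  # derived from main.nf
meta_inputs = [["meta", "bam"]]     # derived from meta.yml (new format)

# If meta.yml still uses the old flat format, meta_inputs would instead be
# ["meta", "bam"]; the two no longer compare equal, and `--fix` regenerates
# the channel structure while copying existing features (e.g. descriptions)
# onto the new elements.
```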
diff --git a/nf_core/modules/lint/environment_yml.py b/nf_core/modules/lint/environment_yml.py
index 341b9cd730..4488b0befa 100644
--- a/nf_core/modules/lint/environment_yml.py
+++ b/nf_core/modules/lint/environment_yml.py
@@ -90,42 +90,3 @@ def environment_yml(module_lint_object: ComponentLint, module: NFCoreComponent)
             env_yml["dependencies"].sort()
         with open(Path(module.component_dir, "environment.yml"), "w") as fh:
             yaml.dump(env_yml, fh, Dumper=custom_yaml_dumper())
-
-    # Check that the name in the environment.yml file matches the name in the meta.yml file
-    with open(Path(module.component_dir, "meta.yml")) as fh:
-        meta_yml = yaml.safe_load(fh)
-
-    if env_yml["name"] == meta_yml["name"]:
-        module.passed.append(
-            (
-                "environment_yml_name",
-                "The module's `environment.yml` name matches module name",
-                module.environment_yml,
-            )
-        )
-    else:
-        module.failed.append(
-            (
-                "environment_yml_name",
-                f"Conflicting process name between environment.yml (`{env_yml['name']}`) and meta.yml (`{module.component_name}`)",
-                module.environment_yml,
-            )
-        )
-
-    # Check that the name is lowercase
-    if env_yml["name"] == env_yml["name"].lower():
-        module.passed.append(
-            (
-                "environment_yml_name_lowercase",
-                "The module's `environment.yml` name is lowercase",
-                module.environment_yml,
-            )
-        )
-    else:
-        module.failed.append(
-            (
-                "environment_yml_name_lowercase",
-                "The module's `environment.yml` name is not lowercase",
-                module.environment_yml,
-            )
-        )
diff --git a/nf_core/modules/lint/main_nf.py b/nf_core/modules/lint/main_nf.py
index dbc1bed737..54a69b113e 100644
--- a/nf_core/modules/lint/main_nf.py
+++ b/nf_core/modules/lint/main_nf.py
@@ -342,6 +342,7 @@ def check_process_section(self, lines, registry, fix_version, progress_bar):
             continue
         try:
             container_url = "https://" + urlunparse(url) if not url.scheme == "https" else urlunparse(url)
+            log.debug(f"Trying to connect to URL: {container_url}")
             response = requests.head(
                 container_url,
                 stream=True,
diff --git a/nf_core/modules/lint/meta_yml.py b/nf_core/modules/lint/meta_yml.py
index 4a0ef6e01e..4ad728d10b 100644
--- a/nf_core/modules/lint/meta_yml.py
+++ b/nf_core/modules/lint/meta_yml.py
@@ -1,13 +1,17 @@
 import json
+import logging
 from pathlib import Path
+from typing import Union

-import yaml
+import ruamel.yaml
 from jsonschema import exceptions, validators

 from nf_core.components.lint import ComponentLint, LintExceptionError
 from nf_core.components.nfcore_component import NFCoreComponent
 from nf_core.modules.modules_differ import ModulesDiffer

+log = logging.getLogger(__name__)
+

 def meta_yml(module_lint_object: ComponentLint, module: NFCoreComponent) -> None:
     """
@@ -39,10 +43,8 @@ def meta_yml(module_lint_object: ComponentLint, module: NFCoreComponent) -> None

     """

-    module.get_inputs_from_main_nf()
-    module.get_outputs_from_main_nf()
     # Check if we have a patch file, get original file in that case
-    meta_yaml = None
+    meta_yaml = read_meta_yml(module_lint_object, module)
     if module.is_patched and module_lint_object.modules_repo.repo_path is not None:
         lines = ModulesDiffer.try_apply_patch(
             module.component_name,
@@ -52,17 +54,15 @@ def meta_yml(module_lint_object: ComponentLint, module: NFCoreComponent) -> None
             reverse=True,
         ).get("meta.yml")
         if lines is not None:
+            yaml = ruamel.yaml.YAML()
             meta_yaml = 
yaml.load("".join(lines))
     if module.meta_yml is None:
         raise LintExceptionError("Module does not have a `meta.yml` file")
     if meta_yaml is None:
-        try:
-            with open(module.meta_yml) as fh:
-                meta_yaml = yaml.safe_load(fh)
-            module.passed.append(("meta_yml_exists", "Module `meta.yml` exists", module.meta_yml))
-        except FileNotFoundError:
-            module.failed.append(("meta_yml_exists", "Module `meta.yml` does not exist", module.meta_yml))
-            return
+        module.failed.append(("meta_yml_exists", "Module `meta.yml` does not exist", module.meta_yml))
+        return
+    else:
+        module.passed.append(("meta_yml_exists", "Module `meta.yml` exists", module.meta_yml))

     # Confirm that the meta.yml file is valid according to the JSON schema
     valid_meta_yml = False
@@ -93,79 +93,8 @@ def meta_yml(module_lint_object: ComponentLint, module: NFCoreComponent) -> None
             )
         )

-    # Confirm that all input and output channels are specified
+    # Confirm that all input and output channels are correctly specified
     if valid_meta_yml:
-        if "input" in meta_yaml:
-            meta_input = [list(x.keys())[0] for x in meta_yaml["input"]]
-            for input in module.inputs:
-                if input in meta_input:
-                    module.passed.append(("meta_input_main_only", f"`{input}` specified", module.meta_yml))
-                else:
-                    module.warned.append(
-                        (
-                            "meta_input_main_only",
-                            f"`{input}` is present as an input in the `main.nf`, but missing in `meta.yml`",
-                            module.meta_yml,
-                        )
-                    )
-            # check if there are any inputs in meta.yml that are not in main.nf
-            for input in meta_input:
-                if input in module.inputs:
-                    module.passed.append(
-                        (
-                            "meta_input_meta_only",
-                            f"`{input}` is present as an input in `meta.yml` and `main.nf`",
-                            module.meta_yml,
-                        )
-                    )
-                else:
-                    module.warned.append(
-                        (
-                            "meta_input_meta_only",
-                            f"`{input}` is present as an input in `meta.yml` but not in `main.nf`",
-                            module.meta_yml,
-                        )
-                    )
-
-        if "output" in meta_yaml and meta_yaml["output"] is not None:
-            meta_output = [list(x.keys())[0] for x in meta_yaml["output"]]
-            for output in module.outputs:
-                if output in meta_output:
-                    module.passed.append(("meta_output_main_only", f"`{output}` specified", module.meta_yml))
-                else:
-                    module.warned.append(
-                        (
-                            "meta_output_main_only",
-                            f"`{output}` is present as an output in the `main.nf`, but missing in `meta.yml`",
-                            module.meta_yml,
-                        )
-                    )
-            # check if there are any outputs in meta.yml that are not in main.nf
-            for output in meta_output:
-                if output in module.outputs:
-                    module.passed.append(
-                        (
-                            "meta_output_meta_only",
-                            f"`{output}` is present as an output in `meta.yml` and `main.nf`",
-                            module.meta_yml,
-                        )
-                    )
-                elif output == "meta":
-                    module.passed.append(
-                        (
-                            "meta_output_meta_only",
-                            f"`{output}` is skipped for `meta.yml` outputs",
-                            module.meta_yml,
-                        )
-                    )
-                else:
-                    module.warned.append(
-                        (
-                            "meta_output_meta_only",
-                            f"`{output}` is present as an output in `meta.yml` but not in `main.nf`",
-                            module.meta_yml,
-                        )
-                    )
         # confirm that the name matches the process name in main.nf
         if meta_yaml["name"].upper() == module.process_name:
             module.passed.append(
@@ -183,3 +112,179 @@
                     module.meta_yml,
                 )
             )
+    # Check that inputs are specified in meta.yml
+    if len(module.inputs) > 0 and "input" not in meta_yaml:
+        module.failed.append(
+            (
+                "meta_input",
+                "Inputs not specified in module `meta.yml`",
+                module.meta_yml,
+            )
+        )
+    elif len(module.inputs) > 0:
+        module.passed.append(
+            (
+                "meta_input",
+                "Inputs specified in module `meta.yml`",
+                module.meta_yml,
+            )
+        )
+    else:
log.debug(f"No inputs specified in module `main.nf`: {module.component_name}") + # Check that all inputs are correctly specified + if "input" in meta_yaml: + correct_inputs, meta_inputs = obtain_correct_and_specified_inputs(module_lint_object, module, meta_yaml) + + if correct_inputs == meta_inputs: + module.passed.append( + ( + "correct_meta_inputs", + "Correct inputs specified in module `meta.yml`", + module.meta_yml, + ) + ) + else: + module.failed.append( + ( + "correct_meta_inputs", + f"Module `meta.yml` does not match `main.nf`. Inputs should contain: {correct_inputs}\nRun `nf-core modules lint --fix` to update the `meta.yml` file.", + module.meta_yml, + ) + ) + + # Check that outputs are specified in meta.yml + if len(module.outputs) > 0 and "output" not in meta_yaml: + module.failed.append( + ( + "meta_output", + "Outputs not specified in module `meta.yml`", + module.meta_yml, + ) + ) + elif len(module.outputs) > 0: + module.passed.append( + ( + "meta_output", + "Outputs specified in module `meta.yml`", + module.meta_yml, + ) + ) + # Check that all outputs are correctly specified + if "output" in meta_yaml: + correct_outputs, meta_outputs = obtain_correct_and_specified_outputs(module_lint_object, module, meta_yaml) + + if correct_outputs == meta_outputs: + module.passed.append( + ( + "correct_meta_outputs", + "Correct outputs specified in module `meta.yml`", + module.meta_yml, + ) + ) + else: + module.failed.append( + ( + "correct_meta_outputs", + f"Module `meta.yml` does not match `main.nf`. Outputs should contain: {correct_outputs}\nRun `nf-core modules lint --fix` to update the `meta.yml` file.", + module.meta_yml, + ) + ) + + +def read_meta_yml(module_lint_object: ComponentLint, module: NFCoreComponent) -> Union[dict, None]: + """ + Read a `meta.yml` file and return it as a dictionary + + Args: + module_lint_object (ComponentLint): The lint object for the module + module (NFCoreComponent): The module to read + + Returns: + dict: The `meta.yml` file as a dictionary + """ + meta_yaml = None + yaml = ruamel.yaml.YAML() + yaml.preserve_quotes = True + # Check if we have a patch file, get original file in that case + if module.is_patched: + lines = ModulesDiffer.try_apply_patch( + module.component_name, + module_lint_object.modules_repo.repo_path, + module.patch_path, + Path(module.component_dir).relative_to(module.base_dir), + reverse=True, + ).get("meta.yml") + if lines is not None: + meta_yaml = yaml.load("".join(lines)) + if meta_yaml is None: + if module.meta_yml is None: + return None + with open(module.meta_yml) as fh: + meta_yaml = yaml.load(fh) + return meta_yaml + + +def obtain_correct_and_specified_inputs(_, module, meta_yaml): + """ + Obtain the list of correct inputs and the elements of each input channel. + + Args: + module (object): The module object. + meta_yaml (dict): The meta.yml dictionary. + + Returns: + tuple: A tuple containing two lists. The first list contains the correct inputs, + and the second list contains the inputs specified in meta.yml. 
+ """ + correct_inputs = [] + for input_channel in module.inputs: + channel_elements = [] + for element in input_channel: + channel_elements.append(list(element.keys())[0]) + correct_inputs.append(channel_elements) + + meta_inputs = [] + for input_channel in meta_yaml["input"]: + if isinstance(input_channel, list): # Correct format + channel_elements = [] + for element in input_channel: + channel_elements.append(list(element.keys())[0]) + meta_inputs.append(channel_elements) + elif isinstance(input_channel, dict): # Old format + meta_inputs.append(list(input_channel.keys())[0]) + + return correct_inputs, meta_inputs + + +def obtain_correct_and_specified_outputs(_, module, meta_yaml): + """ + Obtain the dictionary of correct outputs and elements of each output channel. + + Args: + module (object): The module object. + meta_yaml (dict): The meta.yml dictionary. + + Returns: + correct_outputs (dict): A dictionary containing the correct outputs and their elements. + meta_outputs (dict): A dictionary containing the outputs specified in meta.yml. + """ + correct_outputs = {} + for output_channel in module.outputs: + channel_name = list(output_channel.keys())[0] + channel_elements = [] + for element in output_channel[channel_name]: + channel_elements.append(list(element.keys())[0]) + correct_outputs[channel_name] = channel_elements + + meta_outputs = {} + for output_channel in meta_yaml["output"]: + channel_name = list(output_channel.keys())[0] + if isinstance(output_channel[channel_name], list): # Correct format + channel_elements = [] + for element in output_channel[channel_name]: + channel_elements.append(list(element.keys())[0]) + meta_outputs[channel_name] = channel_elements + elif isinstance(output_channel[channel_name], dict): # Old format + meta_outputs[channel_name] = [] + + return correct_outputs, meta_outputs diff --git a/nf_core/modules/modules_differ.py b/nf_core/modules/modules_differ.py index b6d7f0d0fa..f9ba9d30c7 100644 --- a/nf_core/modules/modules_differ.py +++ b/nf_core/modules/modules_differ.py @@ -391,8 +391,8 @@ def get_new_and_old_lines(patch): def try_apply_single_patch(file_lines, patch, reverse=False): """ Tries to apply a patch to a modified file. Since the line numbers in - the patch does not agree if the file is modified, the old and new - lines inpatch are reconstructed and then we look for the old lines + the patch do not agree if the file is modified, the old and new + lines in the patch are reconstructed and then we look for the old lines in the modified file. If all hunk in the patch are found in the new file it is updated with the new lines from the patch file. diff --git a/nf_core/modules/modules_json.py b/nf_core/modules/modules_json.py index 536589d81e..0dbd87f776 100644 --- a/nf_core/modules/modules_json.py +++ b/nf_core/modules/modules_json.py @@ -432,7 +432,7 @@ def move_component_to_local(self, component_type: str, component: str, repo_name to_name += f"-{datetime.datetime.now().strftime('%y%m%d%H%M%S')}" shutil.move(str(current_path), local_dir / to_name) - def unsynced_components(self) -> Tuple[List[str], List[str], dict]: + def unsynced_components(self) -> Tuple[List[str], List[str], Dict]: """ Compute the difference between the modules/subworkflows in the directory and the modules/subworkflows in the 'modules.json' file. 
diff --git a/nf_core/modules/modules_differ.py b/nf_core/modules/modules_differ.py
index b6d7f0d0fa..f9ba9d30c7 100644
--- a/nf_core/modules/modules_differ.py
+++ b/nf_core/modules/modules_differ.py
@@ -391,8 +391,8 @@ def get_new_and_old_lines(patch):
     def try_apply_single_patch(file_lines, patch, reverse=False):
         """
         Tries to apply a patch to a modified file. Since the line numbers in
-        the patch does not agree if the file is modified, the old and new
-        lines inpatch are reconstructed and then we look for the old lines
+        the patch do not agree if the file is modified, the old and new
+        lines in the patch are reconstructed and then we look for the old lines
         in the modified file. If all hunks in the patch are found in the new
         file it is updated with the new lines from the patch file.
diff --git a/nf_core/modules/modules_json.py b/nf_core/modules/modules_json.py
index 536589d81e..0dbd87f776 100644
--- a/nf_core/modules/modules_json.py
+++ b/nf_core/modules/modules_json.py
@@ -432,7 +432,7 @@ def move_component_to_local(self, component_type: str, component: str, repo_name
             to_name += f"-{datetime.datetime.now().strftime('%y%m%d%H%M%S')}"
         shutil.move(str(current_path), local_dir / to_name)

-    def unsynced_components(self) -> Tuple[List[str], List[str], dict]:
+    def unsynced_components(self) -> Tuple[List[str], List[str], Dict]:
         """
         Compute the difference between the modules/subworkflows in the
         directory and the modules/subworkflows in the 'modules.json' file. This is done by looking at all
@@ -1119,8 +1119,10 @@ def dump(self, run_prettier: bool = False) -> None:
         """
         Sort the modules.json, and write it to file
         """
+        # Sort the modules.json
+        if self.modules_json is None:
+            self.load()
         if self.modules_json is not None:
-            # Sort the modules.json
             self.modules_json["repos"] = nf_core.utils.sort_dictionary(self.modules_json["repos"])
             if run_prettier:
                 dump_json_with_prettier(self.modules_json_path, self.modules_json)
diff --git a/nf_core/pipeline-template/.editorconfig b/nf_core/pipeline-template/.editorconfig
index 5efa586109..c78ec8e960 100644
--- a/nf_core/pipeline-template/.editorconfig
+++ b/nf_core/pipeline-template/.editorconfig
@@ -11,6 +11,7 @@ indent_style = space
 [*.{md,yml,yaml,html,css,scss,js}]
 indent_size = 2

+{% if modules -%}
 # These files are edited and tested upstream in nf-core/modules
 [/modules/nf-core/**]
 charset = unset
@@ -24,8 +25,9 @@ end_of_line = unset
 insert_final_newline = unset
 trim_trailing_whitespace = unset
 indent_style = unset
+{%- endif %}

-{%- if email %}
+{% if email -%}
 [/assets/email*]
 indent_size = unset
 {%- endif %}
diff --git a/nf_core/pipeline-template/.github/CONTRIBUTING.md b/nf_core/pipeline-template/.github/CONTRIBUTING.md
index f331d38673..0200ea26ce 100644
--- a/nf_core/pipeline-template/.github/CONTRIBUTING.md
+++ b/nf_core/pipeline-template/.github/CONTRIBUTING.md
@@ -30,14 +30,14 @@ If you're not used to this workflow with git, you can start with some [docs from

 ## Tests

-{%- if test_config %}
+{% if test_config -%}
 You have the option to test your changes locally by running the pipeline. For receiving warnings about process selectors and other `debug` information, it is recommended to use the debug profile. Execute all the tests with the following command:

 ```bash
 nf-test test --profile debug,test,docker --verbose
 ```

-{% endif %}
+{% endif -%}

 When you create a pull request with changes, [GitHub Actions](https://github.com/features/actions) will run automatic tests. Typically, pull-requests are only fully reviewed when these tests are passing, though of course we can help out before then.

@@ -95,7 +95,7 @@ If you wish to contribute a new step, please use the following coding standards:
 {%- if multiqc %}
 9. Update MultiQC config `assets/multiqc_config.yml` so relevant suffixes, file name clean up and module plots are in the appropriate order. If applicable, add a [MultiQC](https://multiqc.info/) module.
 10. Add a description of the output files and if relevant any appropriate images from the MultiQC report to `docs/output.md`.
-    {% endif %}
+    {%- endif %}

 ### Default values

@@ -105,7 +105,7 @@ Once there, use `nf-core pipelines schema build` to add to `nextflow_schema.json

 ### Default processes resource requirements

-Sensible defaults for process resource requirements (CPUs / memory / time) for a process should be defined in `conf/base.config`. These should generally be specified generic with `withLabel:` selectors so they can be shared across multiple processes/steps of the pipeline. A nf-core standard set of labels that should be followed where possible can be seen in the [nf-core pipeline template](https://github.com/nf-core/tools/blob/master/nf_core/pipeline-template/conf/base.config), which has the default process as a single core-process, and then different levels of multi-core configurations for increasingly large memory requirements defined with standardised labels. 
+Sensible defaults for process resource requirements (CPUs / memory / time) for a process should be defined in `conf/base.config`. These should generally be specified generically with `withLabel:` selectors so they can be shared across multiple processes/steps of the pipeline. A nf-core standard set of labels that should be followed where possible can be seen in the [nf-core pipeline template](https://github.com/nf-core/tools/blob/main/nf_core/pipeline-template/conf/base.config), which has the default process as a single core-process, and then different levels of multi-core configurations for increasingly large memory requirements defined with standardised labels.

 The process resources can be passed on to the tool dynamically within the process with the `${task.cpus}` and `${task.memory}` variables in the `script:` block.

@@ -139,4 +139,4 @@ To get started:
 Devcontainer specs:

 - [DevContainer config](.devcontainer/devcontainer.json)
-  {% endif %}
+  {%- endif %}
diff --git a/nf_core/pipeline-template/.github/workflows/awsfulltest.yml b/nf_core/pipeline-template/.github/workflows/awsfulltest.yml
index dc0450be43..d8987330d5 100644
--- a/nf_core/pipeline-template/.github/workflows/awsfulltest.yml
+++ b/nf_core/pipeline-template/.github/workflows/awsfulltest.yml
@@ -14,16 +14,18 @@ on:
 jobs:
   run-platform:
     name: Run AWS full tests
-    if: github.repository == '{{ name }}' && github.event.review.state == 'approved'
+    # run only if the PR is approved by at least 2 reviewers and against the master branch or manually triggered
+    if: github.repository == '{{ name }}' && github.event.review.state == 'approved' && github.event.pull_request.base.ref == 'master' || github.event_name == 'workflow_dispatch'
     runs-on: ubuntu-latest
     steps:
       - uses: octokit/request-action@v2.x
         id: check_approvals
         with:
-          route: GET /repos/{%- raw -%}${{ github.repository }}/pulls/${{ github.event.review.number }}/reviews
+          route: GET /repos/{%- raw -%}${{ github.repository }}/pulls/${{ github.event.pull_request.number }}/reviews
         env:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
       - id: test_variables
+        if: github.event_name != 'workflow_dispatch'
         run: |
           JSON_RESPONSE='${{ steps.check_approvals.outputs.data }}'{% endraw %}
           CURRENT_APPROVALS_COUNT=$(echo $JSON_RESPONSE | jq -c '[.[] | select(.state | contains("APPROVED")) ] | length')
diff --git a/nf_core/pipeline-template/.github/workflows/ci.yml b/nf_core/pipeline-template/.github/workflows/ci.yml
index 6b2547765d..61738a4147 100644
--- a/nf_core/pipeline-template/.github/workflows/ci.yml
+++ b/nf_core/pipeline-template/.github/workflows/ci.yml
@@ -1,5 +1,5 @@
 name: nf-core CI
-# This workflow runs the pipeline with the minimal test dataset to check that it completes without any syntax errors
+# {% raw %}This workflow runs the pipeline with the minimal test dataset to check that it completes without any syntax errors
 on:
   push:
     branches:
@@ -7,40 +7,79 @@ on:
   pull_request:
   release:
     types: [published]
+  workflow_dispatch:

 env:
   NXF_ANSI_LOG: false
+  NXF_SINGULARITY_CACHEDIR: ${{ github.workspace }}/.singularity
+  NXF_SINGULARITY_LIBRARYDIR: ${{ github.workspace }}/.singularity

 concurrency:
-  group: "{% raw %}${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}{% endraw %}"
+  group: "${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}"
   cancel-in-progress: true

 jobs:
   test:
-    name: Run pipeline with test data
+    name: "Run pipeline with test data (${{ matrix.NXF_VER }} | ${{ matrix.test_name }} | ${{ matrix.profile }})"
     # Only run on push if this 
is the nf-core dev branch (merged PRs) - if: "{% raw %}${{{% endraw %} github.event_name != 'push' || (github.event_name == 'push' && github.repository == '{{ name }}') {% raw %}}}{% endraw %}" + if: "${{{% endraw %} github.event_name != 'push' || (github.event_name == 'push' && github.repository == '{{ name }}') {% raw %}}}" runs-on: ubuntu-latest strategy: matrix: NXF_VER: - - "23.04.0" + - "24.04.2" - "latest-everything" + profile: + - "conda" + - "docker" + - "singularity" + test_name: + - "test" + isMaster: + - ${{ github.base_ref == 'master' }} + # Exclude conda and singularity on dev + exclude: + - isMaster: false + profile: "conda" + - isMaster: false + profile: "singularity" steps: - name: Check out pipeline code uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b # v4 - - name: Install Nextflow + - name: Set up Nextflow uses: nf-core/setup-nextflow@v2 with: - version: "{% raw %}${{ matrix.NXF_VER }}{% endraw %}" + version: "${{ matrix.NXF_VER }}" - - name: Disk space cleanup + - name: Set up Apptainer + if: matrix.profile == 'singularity' + uses: eWaterCycle/setup-apptainer@main + + - name: Set up Singularity + if: matrix.profile == 'singularity' + run: | + mkdir -p $NXF_SINGULARITY_CACHEDIR + mkdir -p $NXF_SINGULARITY_LIBRARYDIR + + - name: Set up Miniconda + if: matrix.profile == 'conda' + uses: conda-incubator/setup-miniconda@a4260408e20b96e80095f42ff7f1a15b27dd94ca # v3 + with: + miniconda-version: "latest" + auto-update-conda: true + conda-solver: libmamba + channels: conda-forge,bioconda + + - name: Set up Conda + if: matrix.profile == 'conda' + run: | + echo $(realpath $CONDA)/condabin >> $GITHUB_PATH + echo $(realpath python) >> $GITHUB_PATH + + - name: Clean up Disk space uses: jlumbroso/free-disk-space@54081f138730dfa15788a46383842cd2f914a1be # v1.3.1 - - name: Run pipeline with test data - # TODO nf-core: You can customise CI pipeline run tests as required - # For example: adding multiple test runs with different parameters - # Remember that you can parallelise this by using strategy.matrix + - name: "Run pipeline with test data ${{ matrix.NXF_VER }} | ${{ matrix.test_name }} | ${{ matrix.profile }}" run: | - nextflow run ${GITHUB_WORKSPACE} -profile test,docker --outdir ./results + nextflow run ${GITHUB_WORKSPACE} -profile ${{ matrix.test_name }},${{ matrix.profile }} --outdir ./results{% endraw %} diff --git a/nf_core/pipeline-template/.github/workflows/download_pipeline.yml b/nf_core/pipeline-template/.github/workflows/download_pipeline.yml index e7a28e5ac4..fdd5492ca2 100644 --- a/nf_core/pipeline-template/.github/workflows/download_pipeline.yml +++ b/nf_core/pipeline-template/.github/workflows/download_pipeline.yml @@ -39,9 +39,11 @@ jobs: with: python-version: "3.12" architecture: "x64" - - uses: eWaterCycle/setup-singularity@931d4e31109e875b13309ae1d07c70ca8fbc8537 # v7 + + - name: Setup Apptainer + uses: eWaterCycle/setup-apptainer@4bb22c52d4f63406c49e94c804632975787312b3 # v2.0.0 with: - singularity-version: 3.8.3 + apptainer-version: 1.3.4 - name: Install dependencies run: | @@ -54,33 +56,64 @@ jobs: echo "REPOTITLE_LOWERCASE=$(basename ${GITHUB_REPOSITORY,,})" >> ${GITHUB_ENV} echo "{% raw %}REPO_BRANCH=${{ github.event.inputs.testbranch || 'dev' }}" >> ${GITHUB_ENV} + - name: Make a cache directory for the container images + run: | + mkdir -p ./singularity_container_images + - name: Download the pipeline env: - NXF_SINGULARITY_CACHEDIR: ./ + NXF_SINGULARITY_CACHEDIR: ./singularity_container_images run: | nf-core pipelines download ${{ 
env.REPO_LOWERCASE }} \
          --revision ${{ env.REPO_BRANCH }} \
          --outdir ./${{ env.REPOTITLE_LOWERCASE }} \
          --compress "none" \
          --container-system 'singularity' \
-          --container-library "quay.io" -l "docker.io" -l "ghcr.io" \
+          --container-library "quay.io" -l "docker.io" -l "community.wave.seqera.io" \
          --container-cache-utilisation 'amend' \
-          --download-configuration
+          --download-configuration 'yes'

      - name: Inspect download
        run: tree ./${{ env.REPOTITLE_LOWERCASE }}{% endraw %}{% if test_config %}{% raw %}

+      - name: Count the downloaded number of container images
+        id: count_initial
+        run: |
+          image_count=$(ls -1 ./singularity_container_images | wc -l | xargs)
+          echo "Initial container image count: $image_count"
+          echo "IMAGE_COUNT_INITIAL=$image_count" >> ${GITHUB_ENV}
+
      - name: Run the downloaded pipeline (stub)
        id: stub_run_pipeline
        continue-on-error: true
        env:
-          NXF_SINGULARITY_CACHEDIR: ./
+          NXF_SINGULARITY_CACHEDIR: ./singularity_container_images
          NXF_SINGULARITY_HOME_MOUNT: true
        run: nextflow run ./${{ env.REPOTITLE_LOWERCASE }}/$( sed 's/\W/_/g' <<< ${{ env.REPO_BRANCH }}) -stub -profile test,singularity --outdir ./results
      - name: Run the downloaded pipeline (stub run not supported)
        id: run_pipeline
        if: ${{ job.steps.stub_run_pipeline.status == failure() }}
        env:
-          NXF_SINGULARITY_CACHEDIR: ./
+          NXF_SINGULARITY_CACHEDIR: ./singularity_container_images
          NXF_SINGULARITY_HOME_MOUNT: true
-        run: nextflow run ./${{ env.REPOTITLE_LOWERCASE }}/$( sed 's/\W/_/g' <<< ${{ env.REPO_BRANCH }}) -profile test,singularity --outdir ./results{% endraw %}{% endif %}
+        run: nextflow run ./${{ env.REPOTITLE_LOWERCASE }}/$( sed 's/\W/_/g' <<< ${{ env.REPO_BRANCH }}) -profile test,singularity --outdir ./results
+
+      - name: Count the downloaded number of container images
+        id: count_afterwards
+        run: |
+          image_count=$(ls -1 ./singularity_container_images | wc -l | xargs)
+          echo "Post-pipeline run container image count: $image_count"
+          echo "IMAGE_COUNT_AFTER=$image_count" >> ${GITHUB_ENV}
+
+      - name: Compare container image counts
+        run: |
+          if [ "${{ env.IMAGE_COUNT_INITIAL }}" -ne "${{ env.IMAGE_COUNT_AFTER }}" ]; then
+            initial_count=${{ env.IMAGE_COUNT_INITIAL }}
+            final_count=${{ env.IMAGE_COUNT_AFTER }}
+            difference=$((final_count - initial_count))
+            echo "$difference additional container images were downloaded at runtime. The pipeline has no support for offline runs!"
+            tree ./singularity_container_images
+            exit 1
+          else
+            echo "The pipeline can be downloaded successfully!"
+ fi{% endraw %}{% endif %} diff --git a/nf_core/pipeline-template/.github/workflows/linting.yml b/nf_core/pipeline-template/.github/workflows/linting.yml index ecd5e32f6c..0eee862f96 100644 --- a/nf_core/pipeline-template/.github/workflows/linting.yml +++ b/nf_core/pipeline-template/.github/workflows/linting.yml @@ -42,10 +42,10 @@ jobs: architecture: "x64" - name: read .nf-core.yml - uses: pietrobolcato/action-read-yaml@1.0.0 + uses: pietrobolcato/action-read-yaml@1.1.0 id: read_yml with: - config: ${{ github.workspace }}/.nf-core.yaml + config: ${{ github.workspace }}/.nf-core.yml - name: Install dependencies run: | @@ -53,12 +53,21 @@ jobs: pip install nf-core==${{ steps.read_yml.outputs['nf_core_version'] }} - name: Run nf-core pipelines lint + if: ${{ github.base_ref != 'master' }} env: GITHUB_COMMENTS_URL: ${{ github.event.pull_request.comments_url }} GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} GITHUB_PR_COMMIT: ${{ github.event.pull_request.head.sha }} run: nf-core -l lint_log.txt pipelines lint --dir ${GITHUB_WORKSPACE} --markdown lint_results.md + - name: Run nf-core pipelines lint --release + if: ${{ github.base_ref == 'master' }} + env: + GITHUB_COMMENTS_URL: ${{ github.event.pull_request.comments_url }} + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + GITHUB_PR_COMMIT: ${{ github.event.pull_request.head.sha }} + run: nf-core -l lint_log.txt pipelines lint --release --dir ${GITHUB_WORKSPACE} --markdown lint_results.md + - name: Save PR number if: ${{ always() }} run: echo ${{ github.event.pull_request.number }} > PR_number.txt diff --git a/nf_core/pipeline-template/.github/workflows/release-announcements.yml b/nf_core/pipeline-template/.github/workflows/release-announcements.yml index 8fee061fdd..035ed63bba 100644 --- a/nf_core/pipeline-template/.github/workflows/release-announcements.yml +++ b/nf_core/pipeline-template/.github/workflows/release-announcements.yml @@ -12,7 +12,7 @@ jobs: - name: get topics and convert to hashtags id: get_topics run: | - echo "topics=$(curl -s https://nf-co.re/pipelines.json | jq -r '.remote_workflows[] | select(.full_name == "${{ github.repository }}") | .topics[]' | awk '{print "#"$0}' | tr '\n' ' ')" >> $GITHUB_OUTPUT + echo "topics=$(curl -s https://nf-co.re/pipelines.json | jq -r '.remote_workflows[] | select(.full_name == "${{ github.repository }}") | .topics[]' | awk '{print "#"$0}' | tr '\n' ' ')" | sed 's/-//g' >> $GITHUB_OUTPUT - uses: rzr/fediverse-action@master with: diff --git a/nf_core/pipeline-template/.github/workflows/template_version_comment.yml b/nf_core/pipeline-template/.github/workflows/template_version_comment.yml index 58db2eb63a..87a218446b 100644 --- a/nf_core/pipeline-template/.github/workflows/template_version_comment.yml +++ b/nf_core/pipeline-template/.github/workflows/template_version_comment.yml @@ -10,9 +10,11 @@ jobs: steps: - name: Check out pipeline code uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b # v4 + with: + ref: ${{ github.event.pull_request.head.sha }} - name: Read template version from .nf-core.yml - uses: pietrobolcato/action-read-yaml@1.0.0 + uses: nichmor/minimal-read-yaml@v0.0.2 id: read_yml with: config: ${{ github.workspace }}/.nf-core.yml @@ -24,20 +26,21 @@ jobs: - name: Check nf-core outdated id: nf_core_outdated - run: pip list --outdated | grep nf-core + run: echo "OUTPUT=$(pip list --outdated | grep nf-core)" >> ${GITHUB_ENV} - name: Post nf-core template version comment uses: mshick/add-pr-comment@b8f338c590a895d50bcbfa6c5859251edc8952fc # v2 if: | - ${{ 
steps.nf_core_outdated.outputs.stdout }} =~ 'nf-core' + contains(env.OUTPUT, 'nf-core') with: repo-token: ${{ secrets.NF_CORE_BOT_AUTH_TOKEN }} allow-repeats: false message: | - ## :warning: Newer version of the nf-core template is available. - - Your pipeline is using an old version of the nf-core template: ${{ steps.read_yml.outputs['nf_core_version'] }}. - Please update your pipeline to the latest version. - - For more documentation on how to update your pipeline, please see the [nf-core documentation](https://github.com/nf-core/tools?tab=readme-ov-file#sync-a-pipeline-with-the-template) and [Synchronisation documentation](https://nf-co.re/docs/contributing/sync). + > [!WARNING] + > Newer version of the nf-core template is available. + > + > Your pipeline is using an old version of the nf-core template: ${{ steps.read_yml.outputs['nf_core_version'] }}. + > Please update your pipeline to the latest version. + > + > For more documentation on how to update your pipeline, please see the [nf-core documentation](https://github.com/nf-core/tools?tab=readme-ov-file#sync-a-pipeline-with-the-template) and [Synchronisation documentation](https://nf-co.re/docs/contributing/sync). #{%- endraw %} diff --git a/nf_core/pipeline-template/.gitignore b/nf_core/pipeline-template/.gitignore index 5124c9ac77..a42ce0162e 100644 --- a/nf_core/pipeline-template/.gitignore +++ b/nf_core/pipeline-template/.gitignore @@ -6,3 +6,4 @@ results/ testing/ testing* *.pyc +null/ diff --git a/nf_core/pipeline-template/.pre-commit-config.yaml b/nf_core/pipeline-template/.pre-commit-config.yaml index 4dc0f1dcd7..9e9f0e1c4e 100644 --- a/nf_core/pipeline-template/.pre-commit-config.yaml +++ b/nf_core/pipeline-template/.pre-commit-config.yaml @@ -7,7 +7,7 @@ repos: - prettier@3.2.5 - repo: https://github.com/editorconfig-checker/editorconfig-checker.python - rev: "2.7.3" + rev: "3.0.3" hooks: - id: editorconfig-checker alias: ec diff --git a/nf_core/pipeline-template/.prettierignore b/nf_core/pipeline-template/.prettierignore index c8e8ad9e11..7ecc9b61cb 100644 --- a/nf_core/pipeline-template/.prettierignore +++ b/nf_core/pipeline-template/.prettierignore @@ -1,4 +1,4 @@ -{%- if email %} +{% if email -%} email_template.html {%- endif %} {%- if adaptivecard %} diff --git a/nf_core/pipeline-template/CITATIONS.md b/nf_core/pipeline-template/CITATIONS.md index 2373f1de7f..16da9a4207 100644 --- a/nf_core/pipeline-template/CITATIONS.md +++ b/nf_core/pipeline-template/CITATIONS.md @@ -15,10 +15,14 @@ {% if fastqc %}- [FastQC](https://www.bioinformatics.babraham.ac.uk/projects/fastqc/) > Andrews, S. (2010). FastQC: A Quality Control Tool for High Throughput Sequence Data [Online]. -> {% endif %} > {% if multiqc %}- [MultiQC](https://pubmed.ncbi.nlm.nih.gov/27312411/) + +{%- endif %} + +{% if multiqc %}- [MultiQC](https://pubmed.ncbi.nlm.nih.gov/27312411/) > Ewels P, Magnusson M, Lundin S, KΓ€ller M. MultiQC: summarize analysis results for multiple tools and samples in a single report. Bioinformatics. 2016 Oct 1;32(19):3047-8. doi: 10.1093/bioinformatics/btw354. Epub 2016 Jun 16. PubMed PMID: 27312411; PubMed Central PMCID: PMC5039924. -> {%- endif %} + +{%- endif %} ## Software packaging/containerisation tools @@ -41,4 +45,5 @@ - [Singularity](https://pubmed.ncbi.nlm.nih.gov/28494014/) > Kurtzer GM, Sochat V, Bauer MW. Singularity: Scientific containers for mobility of compute. PLoS One. 2017 May 11;12(5):e0177459. doi: 10.1371/journal.pone.0177459. eCollection 2017. PubMed PMID: 28494014; PubMed Central PMCID: PMC5426675. 
- > {%- endif %} + + {%- endif %} diff --git a/nf_core/pipeline-template/README.md b/nf_core/pipeline-template/README.md index c8ed828d38..a8f2e60546 100644 --- a/nf_core/pipeline-template/README.md +++ b/nf_core/pipeline-template/README.md @@ -7,6 +7,10 @@ +{% else %} + +# {{ name }} + {% endif -%} {% if github_badges -%} [![GitHub Actions CI Status](https://github.com/{{ name }}/actions/workflows/ci.yml/badge.svg)](https://github.com/{{ name }}/actions/workflows/ci.yml) @@ -16,7 +20,7 @@ [![Cite with Zenodo](http://img.shields.io/badge/DOI-10.5281/zenodo.XXXXXXX-1073c8?labelColor=000000)](https://doi.org/10.5281/zenodo.XXXXXXX) [![nf-test](https://img.shields.io/badge/unit_tests-nf--test-337ab7.svg)](https://www.nf-test.com) -[![Nextflow](https://img.shields.io/badge/nextflow%20DSL2-%E2%89%A523.04.0-23aa62.svg)](https://www.nextflow.io/) +[![Nextflow](https://img.shields.io/badge/nextflow%20DSL2-%E2%89%A524.04.2-23aa62.svg)](https://www.nextflow.io/) [![run with conda](http://img.shields.io/badge/run%20with-conda-3EB049?labelColor=000000&logo=anaconda)](https://docs.conda.io/en/latest/) [![run with docker](https://img.shields.io/badge/run%20with-docker-0db7ed?labelColor=000000&logo=docker)](https://www.docker.com/) [![run with singularity](https://img.shields.io/badge/run%20with-singularity-1d355c.svg?labelColor=000000)](https://sylabs.io/docs/) @@ -124,7 +128,7 @@ An extensive list of references for the tools used by the pipeline can be found You can cite the `nf-core` publication as follows: {% else -%} -This pipeline uses code and infrastructure developed and maintained by the [nf-core](https://nf-co.re) community, reused here under the [MIT license](https://github.com/nf-core/tools/blob/master/LICENSE). +This pipeline uses code and infrastructure developed and maintained by the [nf-core](https://nf-co.re) community, reused here under the [MIT license](https://github.com/nf-core/tools/blob/main/LICENSE). 
{% endif -%} diff --git a/nf_core/pipeline-template/assets/multiqc_config.yml b/nf_core/pipeline-template/assets/multiqc_config.yml index cd4e539b31..e6fd878986 100644 --- a/nf_core/pipeline-template/assets/multiqc_config.yml +++ b/nf_core/pipeline-template/assets/multiqc_config.yml @@ -3,11 +3,11 @@ report_comment: > This report has been generated by the {{ name }} analysis pipeline.{% if is_nfcore %} For information about how to interpret these results, please see the documentation.{% endif %} - {%- else %} + {%- else -%} This report has been generated by the {{ name }} analysis pipeline.{% if is_nfcore %} For information about how to interpret these results, please see the documentation.{% endif %} - {% endif %} + {%- endif %} report_section_order: "{{ name_noslash }}-methods-description": order: -1000 diff --git a/nf_core/pipeline-template/assets/schema_input.json b/nf_core/pipeline-template/assets/schema_input.json index e76b95fa99..28a468adaf 100644 --- a/nf_core/pipeline-template/assets/schema_input.json +++ b/nf_core/pipeline-template/assets/schema_input.json @@ -1,5 +1,5 @@ { - "$schema": "http://json-schema.org/draft-07/schema", + "$schema": "https://json-schema.org/draft/2020-12/schema", "$id": "https://raw.githubusercontent.com/{{ name }}/master/assets/schema_input.json", "title": "{{ name }} pipeline - params.input schema", "description": "Schema for the file provided with params.input", diff --git a/nf_core/pipeline-template/conf/base.config b/nf_core/pipeline-template/conf/base.config index 9c62bf0634..fa292339e3 100644 --- a/nf_core/pipeline-template/conf/base.config +++ b/nf_core/pipeline-template/conf/base.config @@ -11,9 +11,9 @@ process { // TODO nf-core: Check the defaults for all processes - cpus = { check_max( 1 * task.attempt, 'cpus' ) } - memory = { check_max( 6.GB * task.attempt, 'memory' ) } - time = { check_max( 4.h * task.attempt, 'time' ) } + cpus = { 1 * task.attempt } + memory = { 6.GB * task.attempt } + time = { 4.h * task.attempt } errorStrategy = { task.exitStatus in ((130..145) + 104) ? 'retry' : 'finish' } maxRetries = 1 @@ -27,30 +27,30 @@ process { // TODO nf-core: Customise requirements for specific processes. 
// See https://www.nextflow.io/docs/latest/config.html#config-process-selectors withLabel:process_single { - cpus = { check_max( 1 , 'cpus' ) } - memory = { check_max( 6.GB * task.attempt, 'memory' ) } - time = { check_max( 4.h * task.attempt, 'time' ) } + cpus = { 1 } + memory = { 6.GB * task.attempt } + time = { 4.h * task.attempt } } withLabel:process_low { - cpus = { check_max( 2 * task.attempt, 'cpus' ) } - memory = { check_max( 12.GB * task.attempt, 'memory' ) } - time = { check_max( 4.h * task.attempt, 'time' ) } + cpus = { 2 * task.attempt } + memory = { 12.GB * task.attempt } + time = { 4.h * task.attempt } } withLabel:process_medium { - cpus = { check_max( 6 * task.attempt, 'cpus' ) } - memory = { check_max( 36.GB * task.attempt, 'memory' ) } - time = { check_max( 8.h * task.attempt, 'time' ) } + cpus = { 6 * task.attempt } + memory = { 36.GB * task.attempt } + time = { 8.h * task.attempt } } withLabel:process_high { - cpus = { check_max( 12 * task.attempt, 'cpus' ) } - memory = { check_max( 72.GB * task.attempt, 'memory' ) } - time = { check_max( 16.h * task.attempt, 'time' ) } + cpus = { 12 * task.attempt } + memory = { 72.GB * task.attempt } + time = { 16.h * task.attempt } } withLabel:process_long { - time = { check_max( 20.h * task.attempt, 'time' ) } + time = { 20.h * task.attempt } } withLabel:process_high_memory { - memory = { check_max( 200.GB * task.attempt, 'memory' ) } + memory = { 200.GB * task.attempt } } withLabel:error_ignore { errorStrategy = 'ignore' diff --git a/nf_core/pipeline-template/conf/igenomes_ignored.config b/nf_core/pipeline-template/conf/igenomes_ignored.config new file mode 100644 index 0000000000..b4034d8243 --- /dev/null +++ b/nf_core/pipeline-template/conf/igenomes_ignored.config @@ -0,0 +1,9 @@ +/* +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + Nextflow config file for iGenomes paths +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + Empty genomes dictionary to use when igenomes is ignored. 
+----------------------------------------------------------------------------------------
+*/
+
+params.genomes = [:]
diff --git a/nf_core/pipeline-template/conf/test.config b/nf_core/pipeline-template/conf/test.config
index 827e21b7b7..bea6f670d0 100644
--- a/nf_core/pipeline-template/conf/test.config
+++ b/nf_core/pipeline-template/conf/test.config
@@ -10,15 +10,18 @@
 ----------------------------------------------------------------------------------------
 */

+process {
+    resourceLimits = [
+        cpus: 4,
+        memory: '15.GB',
+        time: '1.h'
+    ]
+}
+
 params {
     config_profile_name        = 'Test profile'
     config_profile_description = 'Minimal test dataset to check pipeline function'

-    // Limit resources so that this can run on GitHub Actions
-    max_cpus   = 2
-    max_memory = '6.GB'
-    max_time   = '6.h'
-
     // Input data
     // TODO nf-core: Specify the paths to your test data on nf-core/test-datasets
     // TODO nf-core: Give any required params for the test so that command line flags are not needed
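For context on this change (an illustrative sketch, not part of the template): Nextflow 24.04+ enforces `resourceLimits` natively, which is what lets the template drop the old `check_max()` helper. Requests that exceed a limit are clamped to it rather than rejected, so retry-scaled closures like those in `conf/base.config` stay safe:

```groovy
// Illustration only: with resourceLimits in place, retry-scaled requests
// are capped automatically instead of needing check_max().
process {
    resourceLimits = [ cpus: 4, memory: '15.GB', time: '1.h' ]

    withLabel: process_medium {
        cpus   = { 6 * task.attempt }     // attempt 1 requests 6, capped to 4
        memory = { 36.GB * task.attempt } // capped to 15.GB
        time   = { 8.h * task.attempt }   // capped to 1.h
    }
}
```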
diff --git a/nf_core/pipeline-template/docs/output.md b/nf_core/pipeline-template/docs/output.md
index 5e42d50cc5..83d5d23fe3 100644
--- a/nf_core/pipeline-template/docs/output.md
+++ b/nf_core/pipeline-template/docs/output.md
@@ -12,12 +12,16 @@ The directories listed below will be created in the results directory after the

 The pipeline is built using [Nextflow](https://www.nextflow.io/) and processes data using the following steps:

-{% if fastqc %}- [FastQC](#fastqc) - Raw read QC{% endif %}
-{% if multiqc %}- [MultiQC](#multiqc) - Aggregate report describing results and QC from the whole pipeline{% endif %}
+{% if fastqc -%}
+- [FastQC](#fastqc) - Raw read QC
+  {%- endif %}
+  {%- if multiqc %}
+- [MultiQC](#multiqc) - Aggregate report describing results and QC from the whole pipeline
+  {%- endif %}
 - [Pipeline information](#pipeline-information) - Report metrics generated during the workflow execution

-{%- if fastqc %}
+{% if fastqc -%}

 ### FastQC

@@ -32,7 +36,8 @@ The pipeline is built using [Nextflow](https://www.nextflow.io/) and processes d
 [FastQC](http://www.bioinformatics.babraham.ac.uk/projects/fastqc/) gives general quality metrics about your sequenced reads. It provides information about the quality score distribution across your reads, per base sequence content (%A/T/G/C), adapter contamination and overrepresented sequences. For further reading and documentation see the [FastQC help pages](http://www.bioinformatics.babraham.ac.uk/projects/fastqc/Help/).

 {%- endif %}
-{%- if multiqc %}
+
+{% if multiqc -%}

 ### MultiQC

@@ -49,7 +54,7 @@ The pipeline is built using [Nextflow](https://www.nextflow.io/) and processes d
 [MultiQC](http://multiqc.info) is a visualization tool that generates a single HTML report summarising all samples in your project. Most of the pipeline QC results are visualised in the report and further statistics are available in the report data directory.

 Results generated by MultiQC collate pipeline QC from supported tools e.g. FastQC. The pipeline has special steps which also allow the software versions to be reported in the MultiQC output for future traceability. For more information about how to use MultiQC reports, see <http://multiqc.info>.

-{% endif %}
+{%- endif %}

 ### Pipeline information

@@ -58,7 +63,8 @@ Results generated by MultiQC collate pipeline QC from supported tools e.g. FastQ

 - `pipeline_info/`
   - Reports generated by Nextflow: `execution_report.html`, `execution_timeline.html`, `execution_trace.txt` and `pipeline_dag.dot`/`pipeline_dag.svg`.
-  {% if email %}- Reports generated by the pipeline: `pipeline_report.html`, `pipeline_report.txt` and `software_versions.yml`. The `pipeline_report*` files will only be present if the `--email` / `--email_on_fail` parameter's are used when running the pipeline. {% endif %}
+  {%- if email %}
+  - Reports generated by the pipeline: `pipeline_report.html`, `pipeline_report.txt` and `software_versions.yml`. The `pipeline_report*` files will only be present if the `--email` / `--email_on_fail` parameters are used when running the pipeline.{% endif %}
   - Reformatted samplesheet files used as input to the pipeline: `samplesheet.valid.csv`.
   - Parameters used by the pipeline run: `params.json`.

diff --git a/nf_core/pipeline-template/main.nf b/nf_core/pipeline-template/main.nf
index 7002a9c2d5..6644d74a2a 100644
--- a/nf_core/pipeline-template/main.nf
+++ b/nf_core/pipeline-template/main.nf
@@ -18,6 +18,7 @@
 */

 include { {{ short_name|upper }} } from './workflows/{{ short_name }}'
+{%- if modules %}
 include { PIPELINE_INITIALISATION } from './subworkflows/local/utils_nfcore_{{ short_name }}_pipeline'
 include { PIPELINE_COMPLETION     } from './subworkflows/local/utils_nfcore_{{ short_name }}_pipeline'
 {%- if igenomes %}
@@ -33,7 +34,7 @@ include { getGenomeAttribute } from './subworkflows/local/utils_nfcore_{{ short_name }}_pipeline'
 // This is an example of how to use getGenomeAttribute() to fetch parameters
 // from igenomes.config using `--genome`
 params.fasta = getGenomeAttribute('fasta')
-{% endif %}
+{% endif %}{% endif %}
 /*
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
     NAMED WORKFLOWS FOR PIPELINE
@@ -56,10 +57,10 @@ workflow {{ prefix_nodash|upper }}_{{ short_name|upper }} {
     {{ short_name|upper }} (
         samplesheet
     )
-{%- if multiqc %}
+{%- if multiqc %}{%- if modules %}
     emit:
     multiqc_report = {{ short_name|upper }}.out.multiqc_report // channel: /path/to/multiqc_report.html
-{%- endif %}
+{%- endif %}{%- endif %}
 }

 /*
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
@@ -71,26 +72,32 @@ workflow {

     main:

+    {%- if modules %}
     //
     // SUBWORKFLOW: Run initialisation tasks
     //
     PIPELINE_INITIALISATION (
         params.version,
-        params.help,
         params.validate_params,
         params.monochrome_logs,
         args,
         params.outdir,
         params.input
     )
+    {%- endif %}

     //
     // WORKFLOW: Run main workflow
     //
     {{ prefix_nodash|upper }}_{{ short_name|upper }} (
+        {%- if modules %}
         PIPELINE_INITIALISATION.out.samplesheet
+        {%- else %}
+        params.input
+        {%- endif %}
     )

+    {%- if modules %}
     //
     // SUBWORKFLOW: Run completion tasks
     //
@@ -105,6 +112,7 @@ workflow {
         {% if adaptivecard or slackreport %}params.hook_url,{% endif %}
         {% if multiqc %}{{ prefix_nodash|upper }}_{{ short_name|upper }}.out.multiqc_report{% endif %}
     )
+    {%- endif %}
 }

 /*
diff --git a/nf_core/pipeline-template/modules.json b/nf_core/pipeline-template/modules.json
index 6ab68d0f2a..f714eb1d93 100644
--- a/nf_core/pipeline-template/modules.json
+++ b/nf_core/pipeline-template/modules.json
@@ -8,12 +8,12 @@
 {%- if fastqc %}
         "fastqc": {
             "branch": "master",
-            "git_sha": "285a50500f9e02578d90b3ce6382ea3c30216acd",
+            "git_sha": "666652151335353eef2fcd58880bcef5bc2928e1",
             "installed_by": ["modules"]
         }{% endif %}{%- if multiqc %}{% if fastqc %},{% endif %}
         "multiqc": {
             "branch": "master",
-            "git_sha": "b7ebe95761cd389603f9cc0e0dc384c0f663815a",
+            "git_sha": "cf17ca47590cc578dfb47db1c2a44ef86f89976d",
             "installed_by": ["modules"]
         }
 {%- endif %}
@@ -23,19 +23,19 @@
         "nf-core": {
             "utils_nextflow_pipeline": {
                 "branch": "master",
-                "git_sha": 
"20c03aede5a80ff520a905cea1f8ca121b5bb661", + "git_sha": "3aa0aec1d52d492fe241919f0c6100ebf0074082", "installed_by": ["subworkflows"] }, "utils_nfcore_pipeline": { "branch": "master", - "git_sha": "92de218a329bfc9a9033116eb5f65fd270e72ba3", + "git_sha": "1b6b9a3338d011367137808b49b923515080e3ba", "installed_by": ["subworkflows"] - }, - "utils_nfvalidation_plugin": { + }{% if nf_schema %}, + "utils_nfschema_plugin": { "branch": "master", - "git_sha": "5caf7640a9ef1d18d765d55339be751bb0969dfa", + "git_sha": "bbd5a41f4535a8defafe6080e00ea74c45f4f96c", "installed_by": ["subworkflows"] - } + }{% endif %} } } } diff --git a/nf_core/pipeline-template/modules/nf-core/fastqc/environment.yml b/nf_core/pipeline-template/modules/nf-core/fastqc/environment.yml index 0d5be45f26..691d4c7638 100644 --- a/nf_core/pipeline-template/modules/nf-core/fastqc/environment.yml +++ b/nf_core/pipeline-template/modules/nf-core/fastqc/environment.yml @@ -1,4 +1,3 @@ -name: fastqc channels: - conda-forge - bioconda diff --git a/nf_core/pipeline-template/modules/nf-core/fastqc/main.nf b/nf_core/pipeline-template/modules/nf-core/fastqc/main.nf index d79f1c862d..d8989f4812 100644 --- a/nf_core/pipeline-template/modules/nf-core/fastqc/main.nf +++ b/nf_core/pipeline-template/modules/nf-core/fastqc/main.nf @@ -26,7 +26,10 @@ process FASTQC { def rename_to = old_new_pairs*.join(' ').join(' ') def renamed_files = old_new_pairs.collect{ old_name, new_name -> new_name }.join(' ') - def memory_in_mb = MemoryUnit.of("${task.memory}").toUnit('MB') + // The total amount of allocated RAM by FastQC is equal to the number of threads defined (--threads) time the amount of RAM defined (--memory) + // https://github.com/s-andrews/FastQC/blob/1faeea0412093224d7f6a07f777fad60a5650795/fastqc#L211-L222 + // Dividing the task.memory by task.cpu allows to stick to requested amount of RAM in the label + def memory_in_mb = MemoryUnit.of("${task.memory}").toUnit('MB') / task.cpus // FastQC memory value allowed range (100 - 10000) def fastqc_memory = memory_in_mb > 10000 ? 10000 : (memory_in_mb < 100 ? 100 : memory_in_mb) diff --git a/nf_core/pipeline-template/modules/nf-core/fastqc/meta.yml b/nf_core/pipeline-template/modules/nf-core/fastqc/meta.yml index ee5507e06b..4827da7af2 100644 --- a/nf_core/pipeline-template/modules/nf-core/fastqc/meta.yml +++ b/nf_core/pipeline-template/modules/nf-core/fastqc/meta.yml @@ -16,35 +16,44 @@ tools: homepage: https://www.bioinformatics.babraham.ac.uk/projects/fastqc/ documentation: https://www.bioinformatics.babraham.ac.uk/projects/fastqc/Help/ licence: ["GPL-2.0-only"] + identifier: biotools:fastqc input: - - meta: - type: map - description: | - Groovy Map containing sample information - e.g. [ id:'test', single_end:false ] - - reads: - type: file - description: | - List of input FastQ files of size 1 and 2 for single-end and paired-end data, - respectively. + - - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - reads: + type: file + description: | + List of input FastQ files of size 1 and 2 for single-end and paired-end data, + respectively. output: - - meta: - type: map - description: | - Groovy Map containing sample information - e.g. [ id:'test', single_end:false ] - html: - type: file - description: FastQC report - pattern: "*_{fastqc.html}" + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
[ id:'test', single_end:false ] + - "*.html": + type: file + description: FastQC report + pattern: "*_{fastqc.html}" - zip: - type: file - description: FastQC report archive - pattern: "*_{fastqc.zip}" + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - "*.zip": + type: file + description: FastQC report archive + pattern: "*_{fastqc.zip}" - versions: - type: file - description: File containing software versions - pattern: "versions.yml" + - versions.yml: + type: file + description: File containing software versions + pattern: "versions.yml" authors: - "@drpatelh" - "@grst" diff --git a/nf_core/pipeline-template/modules/nf-core/fastqc/tests/main.nf.test b/nf_core/pipeline-template/modules/nf-core/fastqc/tests/main.nf.test index 70edae4d99..e9d79a074e 100644 --- a/nf_core/pipeline-template/modules/nf-core/fastqc/tests/main.nf.test +++ b/nf_core/pipeline-template/modules/nf-core/fastqc/tests/main.nf.test @@ -23,17 +23,14 @@ nextflow_process { then { assertAll ( - { assert process.success }, - - // NOTE The report contains the date inside it, which means that the md5sum is stable per day, but not longer than that. So you can't md5sum it. - // looks like this:
<div id="header_filename">Mon 2 Oct 2023<br/>test.gz</div>
- // https://github.com/nf-core/modules/pull/3903#issuecomment-1743620039 - - { assert process.out.html[0][1] ==~ ".*/test_fastqc.html" }, - { assert process.out.zip[0][1] ==~ ".*/test_fastqc.zip" }, - { assert path(process.out.html[0][1]).text.contains("File typeConventional base calls") }, - - { assert snapshot(process.out.versions).match("fastqc_versions_single") } + { assert process.success }, + // NOTE The report contains the date inside it, which means that the md5sum is stable per day, but not longer than that. So you can't md5sum it. + // looks like this:
<div id="header_filename">Mon 2 Oct 2023<br/>test.gz</div>
+ // https://github.com/nf-core/modules/pull/3903#issuecomment-1743620039 + { assert process.out.html[0][1] ==~ ".*/test_fastqc.html" }, + { assert process.out.zip[0][1] ==~ ".*/test_fastqc.zip" }, + { assert path(process.out.html[0][1]).text.contains("File typeConventional base calls") }, + { assert snapshot(process.out.versions).match() } ) } } @@ -54,16 +51,14 @@ nextflow_process { then { assertAll ( - { assert process.success }, - - { assert process.out.html[0][1][0] ==~ ".*/test_1_fastqc.html" }, - { assert process.out.html[0][1][1] ==~ ".*/test_2_fastqc.html" }, - { assert process.out.zip[0][1][0] ==~ ".*/test_1_fastqc.zip" }, - { assert process.out.zip[0][1][1] ==~ ".*/test_2_fastqc.zip" }, - { assert path(process.out.html[0][1][0]).text.contains("File typeConventional base calls") }, - { assert path(process.out.html[0][1][1]).text.contains("File typeConventional base calls") }, - - { assert snapshot(process.out.versions).match("fastqc_versions_paired") } + { assert process.success }, + { assert process.out.html[0][1][0] ==~ ".*/test_1_fastqc.html" }, + { assert process.out.html[0][1][1] ==~ ".*/test_2_fastqc.html" }, + { assert process.out.zip[0][1][0] ==~ ".*/test_1_fastqc.zip" }, + { assert process.out.zip[0][1][1] ==~ ".*/test_2_fastqc.zip" }, + { assert path(process.out.html[0][1][0]).text.contains("File typeConventional base calls") }, + { assert path(process.out.html[0][1][1]).text.contains("File typeConventional base calls") }, + { assert snapshot(process.out.versions).match() } ) } } @@ -83,13 +78,11 @@ nextflow_process { then { assertAll ( - { assert process.success }, - - { assert process.out.html[0][1] ==~ ".*/test_fastqc.html" }, - { assert process.out.zip[0][1] ==~ ".*/test_fastqc.zip" }, - { assert path(process.out.html[0][1]).text.contains("File typeConventional base calls") }, - - { assert snapshot(process.out.versions).match("fastqc_versions_interleaved") } + { assert process.success }, + { assert process.out.html[0][1] ==~ ".*/test_fastqc.html" }, + { assert process.out.zip[0][1] ==~ ".*/test_fastqc.zip" }, + { assert path(process.out.html[0][1]).text.contains("File typeConventional base calls") }, + { assert snapshot(process.out.versions).match() } ) } } @@ -109,13 +102,11 @@ nextflow_process { then { assertAll ( - { assert process.success }, - - { assert process.out.html[0][1] ==~ ".*/test_fastqc.html" }, - { assert process.out.zip[0][1] ==~ ".*/test_fastqc.zip" }, - { assert path(process.out.html[0][1]).text.contains("File typeConventional base calls") }, - - { assert snapshot(process.out.versions).match("fastqc_versions_bam") } + { assert process.success }, + { assert process.out.html[0][1] ==~ ".*/test_fastqc.html" }, + { assert process.out.zip[0][1] ==~ ".*/test_fastqc.zip" }, + { assert path(process.out.html[0][1]).text.contains("File typeConventional base calls") }, + { assert snapshot(process.out.versions).match() } ) } } @@ -138,22 +129,20 @@ nextflow_process { then { assertAll ( - { assert process.success }, - - { assert process.out.html[0][1][0] ==~ ".*/test_1_fastqc.html" }, - { assert process.out.html[0][1][1] ==~ ".*/test_2_fastqc.html" }, - { assert process.out.html[0][1][2] ==~ ".*/test_3_fastqc.html" }, - { assert process.out.html[0][1][3] ==~ ".*/test_4_fastqc.html" }, - { assert process.out.zip[0][1][0] ==~ ".*/test_1_fastqc.zip" }, - { assert process.out.zip[0][1][1] ==~ ".*/test_2_fastqc.zip" }, - { assert process.out.zip[0][1][2] ==~ ".*/test_3_fastqc.zip" }, - { assert process.out.zip[0][1][3] ==~ ".*/test_4_fastqc.zip" }, - { assert 
path(process.out.html[0][1][0]).text.contains("File typeConventional base calls") }, - { assert path(process.out.html[0][1][1]).text.contains("File typeConventional base calls") }, - { assert path(process.out.html[0][1][2]).text.contains("File typeConventional base calls") }, - { assert path(process.out.html[0][1][3]).text.contains("File typeConventional base calls") }, - - { assert snapshot(process.out.versions).match("fastqc_versions_multiple") } + { assert process.success }, + { assert process.out.html[0][1][0] ==~ ".*/test_1_fastqc.html" }, + { assert process.out.html[0][1][1] ==~ ".*/test_2_fastqc.html" }, + { assert process.out.html[0][1][2] ==~ ".*/test_3_fastqc.html" }, + { assert process.out.html[0][1][3] ==~ ".*/test_4_fastqc.html" }, + { assert process.out.zip[0][1][0] ==~ ".*/test_1_fastqc.zip" }, + { assert process.out.zip[0][1][1] ==~ ".*/test_2_fastqc.zip" }, + { assert process.out.zip[0][1][2] ==~ ".*/test_3_fastqc.zip" }, + { assert process.out.zip[0][1][3] ==~ ".*/test_4_fastqc.zip" }, + { assert path(process.out.html[0][1][0]).text.contains("File typeConventional base calls") }, + { assert path(process.out.html[0][1][1]).text.contains("File typeConventional base calls") }, + { assert path(process.out.html[0][1][2]).text.contains("File typeConventional base calls") }, + { assert path(process.out.html[0][1][3]).text.contains("File typeConventional base calls") }, + { assert snapshot(process.out.versions).match() } ) } } @@ -173,21 +162,18 @@ nextflow_process { then { assertAll ( - { assert process.success }, - - { assert process.out.html[0][1] ==~ ".*/mysample_fastqc.html" }, - { assert process.out.zip[0][1] ==~ ".*/mysample_fastqc.zip" }, - { assert path(process.out.html[0][1]).text.contains("File typeConventional base calls") }, - - { assert snapshot(process.out.versions).match("fastqc_versions_custom_prefix") } + { assert process.success }, + { assert process.out.html[0][1] ==~ ".*/mysample_fastqc.html" }, + { assert process.out.zip[0][1] ==~ ".*/mysample_fastqc.zip" }, + { assert path(process.out.html[0][1]).text.contains("File typeConventional base calls") }, + { assert snapshot(process.out.versions).match() } ) } } test("sarscov2 single-end [fastq] - stub") { - options "-stub" - + options "-stub" when { process { """ @@ -201,12 +187,123 @@ nextflow_process { then { assertAll ( - { assert process.success }, - { assert snapshot(process.out.html.collect { file(it[1]).getName() } + - process.out.zip.collect { file(it[1]).getName() } + - process.out.versions ).match("fastqc_stub") } + { assert process.success }, + { assert snapshot(process.out).match() } ) } } + test("sarscov2 paired-end [fastq] - stub") { + + options "-stub" + when { + process { + """ + input[0] = Channel.of([ + [id: 'test', single_end: false], // meta map + [ file(params.modules_testdata_base_path + 'genomics/sarscov2/illumina/fastq/test_1.fastq.gz', checkIfExists: true), + file(params.modules_testdata_base_path + 'genomics/sarscov2/illumina/fastq/test_2.fastq.gz', checkIfExists: true) ] + ]) + """ + } + } + + then { + assertAll ( + { assert process.success }, + { assert snapshot(process.out).match() } + ) + } + } + + test("sarscov2 interleaved [fastq] - stub") { + + options "-stub" + when { + process { + """ + input[0] = Channel.of([ + [id: 'test', single_end: false], // meta map + file(params.modules_testdata_base_path + 'genomics/sarscov2/illumina/fastq/test_interleaved.fastq.gz', checkIfExists: true) + ]) + """ + } + } + + then { + assertAll ( + { assert process.success }, + { assert 
snapshot(process.out).match() } + ) + } + } + + test("sarscov2 paired-end [bam] - stub") { + + options "-stub" + when { + process { + """ + input[0] = Channel.of([ + [id: 'test', single_end: false], // meta map + file(params.modules_testdata_base_path + 'genomics/sarscov2/illumina/bam/test.paired_end.sorted.bam', checkIfExists: true) + ]) + """ + } + } + + then { + assertAll ( + { assert process.success }, + { assert snapshot(process.out).match() } + ) + } + } + + test("sarscov2 multiple [fastq] - stub") { + + options "-stub" + when { + process { + """ + input[0] = Channel.of([ + [id: 'test', single_end: false], // meta map + [ file(params.modules_testdata_base_path + 'genomics/sarscov2/illumina/fastq/test_1.fastq.gz', checkIfExists: true), + file(params.modules_testdata_base_path + 'genomics/sarscov2/illumina/fastq/test_2.fastq.gz', checkIfExists: true), + file(params.modules_testdata_base_path + 'genomics/sarscov2/illumina/fastq/test2_1.fastq.gz', checkIfExists: true), + file(params.modules_testdata_base_path + 'genomics/sarscov2/illumina/fastq/test2_2.fastq.gz', checkIfExists: true) ] + ]) + """ + } + } + + then { + assertAll ( + { assert process.success }, + { assert snapshot(process.out).match() } + ) + } + } + + test("sarscov2 custom_prefix - stub") { + + options "-stub" + when { + process { + """ + input[0] = Channel.of([ + [ id:'mysample', single_end:true ], // meta map + file(params.modules_testdata_base_path + 'genomics/sarscov2/illumina/fastq/test_1.fastq.gz', checkIfExists: true) + ]) + """ + } + } + + then { + assertAll ( + { assert process.success }, + { assert snapshot(process.out).match() } + ) + } + } } diff --git a/nf_core/pipeline-template/modules/nf-core/fastqc/tests/main.nf.test.snap b/nf_core/pipeline-template/modules/nf-core/fastqc/tests/main.nf.test.snap index 86f7c31154..d5db3092fb 100644 --- a/nf_core/pipeline-template/modules/nf-core/fastqc/tests/main.nf.test.snap +++ b/nf_core/pipeline-template/modules/nf-core/fastqc/tests/main.nf.test.snap @@ -1,88 +1,392 @@ { - "fastqc_versions_interleaved": { + "sarscov2 custom_prefix": { "content": [ [ "versions.yml:md5,e1cc25ca8af856014824abd842e93978" ] ], "meta": { - "nf-test": "0.8.4", - "nextflow": "23.10.1" + "nf-test": "0.9.0", + "nextflow": "24.04.3" }, - "timestamp": "2024-01-31T17:40:07.293713" + "timestamp": "2024-07-22T11:02:16.374038" }, - "fastqc_stub": { + "sarscov2 single-end [fastq] - stub": { "content": [ - [ - "test.html", - "test.zip", - "versions.yml:md5,e1cc25ca8af856014824abd842e93978" - ] + { + "0": [ + [ + { + "id": "test", + "single_end": true + }, + "test.html:md5,d41d8cd98f00b204e9800998ecf8427e" + ] + ], + "1": [ + [ + { + "id": "test", + "single_end": true + }, + "test.zip:md5,d41d8cd98f00b204e9800998ecf8427e" + ] + ], + "2": [ + "versions.yml:md5,e1cc25ca8af856014824abd842e93978" + ], + "html": [ + [ + { + "id": "test", + "single_end": true + }, + "test.html:md5,d41d8cd98f00b204e9800998ecf8427e" + ] + ], + "versions": [ + "versions.yml:md5,e1cc25ca8af856014824abd842e93978" + ], + "zip": [ + [ + { + "id": "test", + "single_end": true + }, + "test.zip:md5,d41d8cd98f00b204e9800998ecf8427e" + ] + ] + } + ], + "meta": { + "nf-test": "0.9.0", + "nextflow": "24.04.3" + }, + "timestamp": "2024-07-22T11:02:24.993809" + }, + "sarscov2 custom_prefix - stub": { + "content": [ + { + "0": [ + [ + { + "id": "mysample", + "single_end": true + }, + "mysample.html:md5,d41d8cd98f00b204e9800998ecf8427e" + ] + ], + "1": [ + [ + { + "id": "mysample", + "single_end": true + }, + 
"mysample.zip:md5,d41d8cd98f00b204e9800998ecf8427e" + ] + ], + "2": [ + "versions.yml:md5,e1cc25ca8af856014824abd842e93978" + ], + "html": [ + [ + { + "id": "mysample", + "single_end": true + }, + "mysample.html:md5,d41d8cd98f00b204e9800998ecf8427e" + ] + ], + "versions": [ + "versions.yml:md5,e1cc25ca8af856014824abd842e93978" + ], + "zip": [ + [ + { + "id": "mysample", + "single_end": true + }, + "mysample.zip:md5,d41d8cd98f00b204e9800998ecf8427e" + ] + ] + } ], "meta": { - "nf-test": "0.8.4", - "nextflow": "23.10.1" + "nf-test": "0.9.0", + "nextflow": "24.04.3" }, - "timestamp": "2024-01-31T17:31:01.425198" + "timestamp": "2024-07-22T11:03:10.93942" }, - "fastqc_versions_multiple": { + "sarscov2 interleaved [fastq]": { "content": [ [ "versions.yml:md5,e1cc25ca8af856014824abd842e93978" ] ], "meta": { - "nf-test": "0.8.4", - "nextflow": "23.10.1" + "nf-test": "0.9.0", + "nextflow": "24.04.3" }, - "timestamp": "2024-01-31T17:40:55.797907" + "timestamp": "2024-07-22T11:01:42.355718" }, - "fastqc_versions_bam": { + "sarscov2 paired-end [bam]": { "content": [ [ "versions.yml:md5,e1cc25ca8af856014824abd842e93978" ] ], "meta": { - "nf-test": "0.8.4", - "nextflow": "23.10.1" + "nf-test": "0.9.0", + "nextflow": "24.04.3" }, - "timestamp": "2024-01-31T17:40:26.795862" + "timestamp": "2024-07-22T11:01:53.276274" }, - "fastqc_versions_single": { + "sarscov2 multiple [fastq]": { "content": [ [ "versions.yml:md5,e1cc25ca8af856014824abd842e93978" ] ], "meta": { - "nf-test": "0.8.4", - "nextflow": "23.10.1" + "nf-test": "0.9.0", + "nextflow": "24.04.3" }, - "timestamp": "2024-01-31T17:39:27.043675" + "timestamp": "2024-07-22T11:02:05.527626" }, - "fastqc_versions_paired": { + "sarscov2 paired-end [fastq]": { "content": [ [ "versions.yml:md5,e1cc25ca8af856014824abd842e93978" ] ], "meta": { - "nf-test": "0.8.4", - "nextflow": "23.10.1" + "nf-test": "0.9.0", + "nextflow": "24.04.3" + }, + "timestamp": "2024-07-22T11:01:31.188871" + }, + "sarscov2 paired-end [fastq] - stub": { + "content": [ + { + "0": [ + [ + { + "id": "test", + "single_end": false + }, + "test.html:md5,d41d8cd98f00b204e9800998ecf8427e" + ] + ], + "1": [ + [ + { + "id": "test", + "single_end": false + }, + "test.zip:md5,d41d8cd98f00b204e9800998ecf8427e" + ] + ], + "2": [ + "versions.yml:md5,e1cc25ca8af856014824abd842e93978" + ], + "html": [ + [ + { + "id": "test", + "single_end": false + }, + "test.html:md5,d41d8cd98f00b204e9800998ecf8427e" + ] + ], + "versions": [ + "versions.yml:md5,e1cc25ca8af856014824abd842e93978" + ], + "zip": [ + [ + { + "id": "test", + "single_end": false + }, + "test.zip:md5,d41d8cd98f00b204e9800998ecf8427e" + ] + ] + } + ], + "meta": { + "nf-test": "0.9.0", + "nextflow": "24.04.3" + }, + "timestamp": "2024-07-22T11:02:34.273566" + }, + "sarscov2 multiple [fastq] - stub": { + "content": [ + { + "0": [ + [ + { + "id": "test", + "single_end": false + }, + "test.html:md5,d41d8cd98f00b204e9800998ecf8427e" + ] + ], + "1": [ + [ + { + "id": "test", + "single_end": false + }, + "test.zip:md5,d41d8cd98f00b204e9800998ecf8427e" + ] + ], + "2": [ + "versions.yml:md5,e1cc25ca8af856014824abd842e93978" + ], + "html": [ + [ + { + "id": "test", + "single_end": false + }, + "test.html:md5,d41d8cd98f00b204e9800998ecf8427e" + ] + ], + "versions": [ + "versions.yml:md5,e1cc25ca8af856014824abd842e93978" + ], + "zip": [ + [ + { + "id": "test", + "single_end": false + }, + "test.zip:md5,d41d8cd98f00b204e9800998ecf8427e" + ] + ] + } + ], + "meta": { + "nf-test": "0.9.0", + "nextflow": "24.04.3" }, - "timestamp": 
"2024-01-31T17:39:47.584191" + "timestamp": "2024-07-22T11:03:02.304411" }, - "fastqc_versions_custom_prefix": { + "sarscov2 single-end [fastq]": { "content": [ [ "versions.yml:md5,e1cc25ca8af856014824abd842e93978" ] ], "meta": { - "nf-test": "0.8.4", - "nextflow": "23.10.1" + "nf-test": "0.9.0", + "nextflow": "24.04.3" + }, + "timestamp": "2024-07-22T11:01:19.095607" + }, + "sarscov2 interleaved [fastq] - stub": { + "content": [ + { + "0": [ + [ + { + "id": "test", + "single_end": false + }, + "test.html:md5,d41d8cd98f00b204e9800998ecf8427e" + ] + ], + "1": [ + [ + { + "id": "test", + "single_end": false + }, + "test.zip:md5,d41d8cd98f00b204e9800998ecf8427e" + ] + ], + "2": [ + "versions.yml:md5,e1cc25ca8af856014824abd842e93978" + ], + "html": [ + [ + { + "id": "test", + "single_end": false + }, + "test.html:md5,d41d8cd98f00b204e9800998ecf8427e" + ] + ], + "versions": [ + "versions.yml:md5,e1cc25ca8af856014824abd842e93978" + ], + "zip": [ + [ + { + "id": "test", + "single_end": false + }, + "test.zip:md5,d41d8cd98f00b204e9800998ecf8427e" + ] + ] + } + ], + "meta": { + "nf-test": "0.9.0", + "nextflow": "24.04.3" + }, + "timestamp": "2024-07-22T11:02:44.640184" + }, + "sarscov2 paired-end [bam] - stub": { + "content": [ + { + "0": [ + [ + { + "id": "test", + "single_end": false + }, + "test.html:md5,d41d8cd98f00b204e9800998ecf8427e" + ] + ], + "1": [ + [ + { + "id": "test", + "single_end": false + }, + "test.zip:md5,d41d8cd98f00b204e9800998ecf8427e" + ] + ], + "2": [ + "versions.yml:md5,e1cc25ca8af856014824abd842e93978" + ], + "html": [ + [ + { + "id": "test", + "single_end": false + }, + "test.html:md5,d41d8cd98f00b204e9800998ecf8427e" + ] + ], + "versions": [ + "versions.yml:md5,e1cc25ca8af856014824abd842e93978" + ], + "zip": [ + [ + { + "id": "test", + "single_end": false + }, + "test.zip:md5,d41d8cd98f00b204e9800998ecf8427e" + ] + ] + } + ], + "meta": { + "nf-test": "0.9.0", + "nextflow": "24.04.3" }, - "timestamp": "2024-01-31T17:41:14.576531" + "timestamp": "2024-07-22T11:02:53.550742" } } \ No newline at end of file diff --git a/nf_core/pipeline-template/modules/nf-core/multiqc/environment.yml b/nf_core/pipeline-template/modules/nf-core/multiqc/environment.yml index 329ddb4870..6f5b867b76 100644 --- a/nf_core/pipeline-template/modules/nf-core/multiqc/environment.yml +++ b/nf_core/pipeline-template/modules/nf-core/multiqc/environment.yml @@ -1,6 +1,5 @@ -name: multiqc channels: - conda-forge - bioconda dependencies: - - bioconda::multiqc=1.21 + - bioconda::multiqc=1.25.1 diff --git a/nf_core/pipeline-template/modules/nf-core/multiqc/main.nf b/nf_core/pipeline-template/modules/nf-core/multiqc/main.nf index 47ac352f94..cc0643e1d5 100644 --- a/nf_core/pipeline-template/modules/nf-core/multiqc/main.nf +++ b/nf_core/pipeline-template/modules/nf-core/multiqc/main.nf @@ -3,14 +3,16 @@ process MULTIQC { conda "${moduleDir}/environment.yml" container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
- 'https://depot.galaxyproject.org/singularity/multiqc:1.21--pyhdfd78af_0' : - 'biocontainers/multiqc:1.21--pyhdfd78af_0' }" + 'https://depot.galaxyproject.org/singularity/multiqc:1.25.1--pyhdfd78af_0' : + 'biocontainers/multiqc:1.25.1--pyhdfd78af_0' }" input: path multiqc_files, stageAs: "?/*" path(multiqc_config) path(extra_multiqc_config) path(multiqc_logo) + path(replace_names) + path(sample_names) output: path "*multiqc_report.html", emit: report @@ -23,16 +25,22 @@ process MULTIQC { script: def args = task.ext.args ?: '' + def prefix = task.ext.prefix ? "--filename ${task.ext.prefix}.html" : '' def config = multiqc_config ? "--config $multiqc_config" : '' def extra_config = extra_multiqc_config ? "--config $extra_multiqc_config" : '' - def logo = multiqc_logo ? /--cl-config 'custom_logo: "${multiqc_logo}"'/ : '' + def logo = multiqc_logo ? "--cl-config 'custom_logo: \"${multiqc_logo}\"'" : '' + def replace = replace_names ? "--replace-names ${replace_names}" : '' + def samples = sample_names ? "--sample-names ${sample_names}" : '' """ multiqc \\ --force \\ $args \\ $config \\ + $prefix \\ $extra_config \\ $logo \\ + $replace \\ + $samples \\ . cat <<-END_VERSIONS > versions.yml @@ -44,7 +52,7 @@ process MULTIQC { stub: """ mkdir multiqc_data - touch multiqc_plots + mkdir multiqc_plots touch multiqc_report.html cat <<-END_VERSIONS > versions.yml diff --git a/nf_core/pipeline-template/modules/nf-core/multiqc/meta.yml b/nf_core/pipeline-template/modules/nf-core/multiqc/meta.yml index 45a9bc35e1..b16c187923 100644 --- a/nf_core/pipeline-template/modules/nf-core/multiqc/meta.yml +++ b/nf_core/pipeline-template/modules/nf-core/multiqc/meta.yml @@ -1,5 +1,6 @@ name: multiqc -description: Aggregate results from bioinformatics analyses across many samples into a single report +description: Aggregate results from bioinformatics analyses across many samples into + a single report keywords: - QC - bioinformatics tools @@ -12,40 +13,59 @@ tools: homepage: https://multiqc.info/ documentation: https://multiqc.info/docs/ licence: ["GPL-3.0-or-later"] + identifier: biotools:multiqc input: - - multiqc_files: - type: file - description: | - List of reports / files recognised by MultiQC, for example the html and zip output of FastQC - - multiqc_config: - type: file - description: Optional config yml for MultiQC - pattern: "*.{yml,yaml}" - - extra_multiqc_config: - type: file - description: Second optional config yml for MultiQC. Will override common sections in multiqc_config. - pattern: "*.{yml,yaml}" - - multiqc_logo: - type: file - description: Optional logo file for MultiQC - pattern: "*.{png}" + - - multiqc_files: + type: file + description: | + List of reports / files recognised by MultiQC, for example the html and zip output of FastQC + - - multiqc_config: + type: file + description: Optional config yml for MultiQC + pattern: "*.{yml,yaml}" + - - extra_multiqc_config: + type: file + description: Second optional config yml for MultiQC. Will override common sections + in multiqc_config. + pattern: "*.{yml,yaml}" + - - multiqc_logo: + type: file + description: Optional logo file for MultiQC + pattern: "*.{png}" + - - replace_names: + type: file + description: | + Optional two-column sample renaming file. First column a set of + patterns, second column a set of corresponding replacements. Passed via + MultiQC's `--replace-names` option. + pattern: "*.{tsv}" + - - sample_names: + type: file + description: | + Optional TSV file with headers, passed to the MultiQC --sample_names + argument. 
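# ----------------------------------------------------------------------------------------
# Aside: the MULTIQC process gains two optional inputs in this patch, replace_names and
# sample_names, forwarded to MultiQC's --replace-names and --sample-names options. A
# sketch of how a caller might wire them; the channel names are hypothetical, and the
# empty lists keep both options disabled, as in the updated tests:
#
#     MULTIQC (
#         ch_multiqc_files.collect(),
#         ch_multiqc_config.toList(),
#         ch_multiqc_custom_config.toList(),
#         ch_multiqc_logo.toList(),
#         [],    // replace_names: optional two-column TSV of rename patterns
#         []     // sample_names: optional TSV with headers for sample-name buttons
#     )
# ----------------------------------------------------------------------------------------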
+ pattern: "*.{tsv}" output: - report: - type: file - description: MultiQC report file - pattern: "multiqc_report.html" + - "*multiqc_report.html": + type: file + description: MultiQC report file + pattern: "multiqc_report.html" - data: - type: directory - description: MultiQC data dir - pattern: "multiqc_data" + - "*_data": + type: directory + description: MultiQC data dir + pattern: "multiqc_data" - plots: - type: file - description: Plots created by MultiQC - pattern: "*_data" + - "*_plots": + type: file + description: Plots created by MultiQC + pattern: "*_data" - versions: - type: file - description: File containing software versions - pattern: "versions.yml" + - versions.yml: + type: file + description: File containing software versions + pattern: "versions.yml" authors: - "@abhi18av" - "@bunop" diff --git a/nf_core/pipeline-template/modules/nf-core/multiqc/tests/main.nf.test b/nf_core/pipeline-template/modules/nf-core/multiqc/tests/main.nf.test index f1c4242ef2..33316a7ddb 100644 --- a/nf_core/pipeline-template/modules/nf-core/multiqc/tests/main.nf.test +++ b/nf_core/pipeline-template/modules/nf-core/multiqc/tests/main.nf.test @@ -8,6 +8,8 @@ nextflow_process { tag "modules_nfcore" tag "multiqc" + config "./nextflow.config" + test("sarscov2 single-end [fastqc]") { when { @@ -17,6 +19,8 @@ nextflow_process { input[1] = [] input[2] = [] input[3] = [] + input[4] = [] + input[5] = [] """ } } @@ -41,6 +45,8 @@ nextflow_process { input[1] = Channel.of(file("https://github.com/nf-core/tools/raw/dev/nf_core/pipeline-template/assets/multiqc_config.yml", checkIfExists: true)) input[2] = [] input[3] = [] + input[4] = [] + input[5] = [] """ } } @@ -66,6 +72,8 @@ nextflow_process { input[1] = [] input[2] = [] input[3] = [] + input[4] = [] + input[5] = [] """ } } diff --git a/nf_core/pipeline-template/modules/nf-core/multiqc/tests/main.nf.test.snap b/nf_core/pipeline-template/modules/nf-core/multiqc/tests/main.nf.test.snap index bfebd80298..2fcbb5ff7d 100644 --- a/nf_core/pipeline-template/modules/nf-core/multiqc/tests/main.nf.test.snap +++ b/nf_core/pipeline-template/modules/nf-core/multiqc/tests/main.nf.test.snap @@ -2,14 +2,14 @@ "multiqc_versions_single": { "content": [ [ - "versions.yml:md5,21f35ee29416b9b3073c28733efe4b7d" + "versions.yml:md5,41f391dcedce7f93ca188f3a3ffa0916" ] ], "meta": { - "nf-test": "0.8.4", - "nextflow": "23.10.1" + "nf-test": "0.9.0", + "nextflow": "24.04.4" }, - "timestamp": "2024-02-29T08:48:55.657331" + "timestamp": "2024-10-02T17:51:46.317523" }, "multiqc_stub": { "content": [ @@ -17,25 +17,25 @@ "multiqc_report.html", "multiqc_data", "multiqc_plots", - "versions.yml:md5,21f35ee29416b9b3073c28733efe4b7d" + "versions.yml:md5,41f391dcedce7f93ca188f3a3ffa0916" ] ], "meta": { - "nf-test": "0.8.4", - "nextflow": "23.10.1" + "nf-test": "0.9.0", + "nextflow": "24.04.4" }, - "timestamp": "2024-02-29T08:49:49.071937" + "timestamp": "2024-10-02T17:52:20.680978" }, "multiqc_versions_config": { "content": [ [ - "versions.yml:md5,21f35ee29416b9b3073c28733efe4b7d" + "versions.yml:md5,41f391dcedce7f93ca188f3a3ffa0916" ] ], "meta": { - "nf-test": "0.8.4", - "nextflow": "23.10.1" + "nf-test": "0.9.0", + "nextflow": "24.04.4" }, - "timestamp": "2024-02-29T08:49:25.457567" + "timestamp": "2024-10-02T17:52:09.185842" } } \ No newline at end of file diff --git a/nf_core/pipeline-template/modules/nf-core/multiqc/tests/nextflow.config b/nf_core/pipeline-template/modules/nf-core/multiqc/tests/nextflow.config new file mode 100644 index 0000000000..c537a6a3e7 --- /dev/null +++ 
b/nf_core/pipeline-template/modules/nf-core/multiqc/tests/nextflow.config @@ -0,0 +1,5 @@ +process { + withName: 'MULTIQC' { + ext.prefix = null + } +} diff --git a/nf_core/pipeline-template/nextflow.config b/nf_core/pipeline-template/nextflow.config index b366884199..0afaaa6785 100644 --- a/nf_core/pipeline-template/nextflow.config +++ b/nf_core/pipeline-template/nextflow.config @@ -13,14 +13,14 @@ params { // Input options input = null - {%- if igenomes %} + {% if igenomes -%} // References genome = null igenomes_base = 's3://ngi-igenomes/igenomes/' igenomes_ignore = false {%- endif %} - {%- if multiqc %} + {% if multiqc -%} // MultiQC options multiqc_config = null multiqc_title = null @@ -31,19 +31,21 @@ params { // Boilerplate options outdir = null - publish_dir_mode = 'copy' + {% if modules %}publish_dir_mode = 'copy'{% endif %} {%- if email %} email = null email_on_fail = null plaintext_email = false {%- endif %} - monochrome_logs = false - hook_url = null - help = false + {% if modules %}monochrome_logs = false{% endif %} + {% if slackreport or adaptivecard %}hook_url = null{% endif %} + {% if nf_schema %}help = false + help_full = false + show_hidden = false{% endif %} version = false {% if test_config %}pipelines_testdata_base_path = 'https://raw.githubusercontent.com/nf-core/test-datasets/'{% endif %} - {%- if nf_core_configs %} + {% if nf_core_configs -%} // Config options config_profile_name = null config_profile_description = null @@ -54,39 +56,28 @@ params { config_profile_url = null {%- endif %} - // Max resource options - // Defaults only, expecting to be overwritten - max_memory = '128.GB' - max_cpus = 16 - max_time = '240.h' - + {% if nf_schema -%} // Schema validation default options - validationFailUnrecognisedParams = false - validationLenientMode = false - validationSchemaIgnoreParams = 'genomes,igenomes_base' - validationShowHiddenParams = false - validate_params = true - + validate_params = true + {%- endif %} } +{% if modules -%} // Load base.config by default for all pipelines includeConfig 'conf/base.config' +{%- else %} -{% if nf_core_configs -%} -// Load nf-core custom profiles from different Institutions -try { - includeConfig "${params.custom_config_base}/nfcore_custom.config" -} catch (Exception e) { - System.err.println("WARNING: Could not load nf-core/config profiles: ${params.custom_config_base}/nfcore_custom.config") -} +process { + // TODO nf-core: Check the defaults for all processes + cpus = { 1 * task.attempt } + memory = { 6.GB * task.attempt } + time = { 4.h * task.attempt } -// Load {{ name }} custom profiles from different institutions. -try { - includeConfig "${params.custom_config_base}/pipeline/{{ short_name }}.config" -} catch (Exception e) { - System.err.println("WARNING: Could not load nf-core/config/{{ short_name }} profiles: ${params.custom_config_base}/pipeline/{{ short_name }}.config") + errorStrategy = { task.exitStatus in ((130..145) + 104) ? 
'retry' : 'finish' } + maxRetries = 1 + maxErrors = '-1' } -{% endif -%} +{%- endif %} profiles { debug { @@ -182,19 +173,28 @@ profiles { wave.freeze = true wave.strategy = 'conda,container' } - {%- if gitpod %} + {% if gitpod -%} gitpod { executor.name = 'local' executor.cpus = 4 executor.memory = 8.GB } {%- endif %} - {%- if test_config %} + {% if test_config -%} test { includeConfig 'conf/test.config' } test_full { includeConfig 'conf/test_full.config' } {%- endif %} } +{% if nf_core_configs -%} +// Load nf-core custom profiles from different Institutions +includeConfig !System.getenv('NXF_OFFLINE') && params.custom_config_base ? "${params.custom_config_base}/nfcore_custom.config" : "/dev/null" + +// Load {{ name }} custom profiles from different institutions. +// TODO nf-core: Optionally, you can add a pipeline-specific nf-core config at https://github.com/nf-core/configs +// includeConfig !System.getenv('NXF_OFFLINE') && params.custom_config_base ? "${params.custom_config_base}/pipeline/{{ short_name }}.config" : "/dev/null" +{%- endif %} + // Set default registry for Apptainer, Docker, Podman, Charliecloud and Singularity independent of -profile // Will not be used unless Apptainer / Docker / Podman / Charliecloud / Singularity are enabled // Set to your registry if you have a mirror of containers @@ -204,19 +204,10 @@ podman.registry = 'quay.io' singularity.registry = 'quay.io' charliecloud.registry = 'quay.io' -// Nextflow plugins -plugins { - id 'nf-validation@1.1.3' // Validation of pipeline parameters and creation of an input channel from a sample sheet -} - {% if igenomes -%} // Load igenomes.config if required -if (!params.igenomes_ignore) { - includeConfig 'conf/igenomes.config' -} else { - params.genomes = [:] -} -{% endif -%} +includeConfig !params.igenomes_ignore ? 'conf/igenomes.config' : 'conf/igenomes_ignored.config' +{%- endif %} // Export these variables to prevent local Python/R libraries from conflicting with those in the container // The JULIA depot path has been adjusted to a fixed path `/usr/local/share/julia` that needs to be used for packages in the container. @@ -266,43 +257,51 @@ manifest { homePage = 'https://github.com/{{ name }}' description = """{{ description }}""" mainScript = 'main.nf' - nextflowVersion = '!>=23.04.0' + nextflowVersion = '!>=24.04.2' version = '{{ version }}' doi = '' } -// Load modules.config for DSL2 module specific options -includeConfig 'conf/modules.config' +{% if nf_schema -%} +// Nextflow plugins +plugins { + id 'nf-schema@2.1.1' // Validation of pipeline parameters and creation of an input channel from a sample sheet +} -// Function to ensure that resource requirements don't go beyond -// a maximum limit -def check_max(obj, type) { - if (type == 'memory') { - try { - if (obj.compareTo(params.max_memory as nextflow.util.MemoryUnit) == 1) - return params.max_memory as nextflow.util.MemoryUnit - else - return obj - } catch (all) { - println " ### ERROR ### Max memory '${params.max_memory}' is not valid! Using default value: $obj" - return obj - } - } else if (type == 'time') { - try { - if (obj.compareTo(params.max_time as nextflow.util.Duration) == 1) - return params.max_time as nextflow.util.Duration - else - return obj - } catch (all) { - println " ### ERROR ### Max time '${params.max_time}' is not valid! Using default value: $obj" - return obj - } - } else if (type == 'cpus') { - try { - return Math.min( obj, params.max_cpus as int ) - } catch (all) { - println " ### ERROR ### Max cpus '${params.max_cpus}' is not valid! 
Using default value: $obj" - return obj - } - } +validation { + defaultIgnoreParams = ["genomes"] + help { + enabled = true + command = "nextflow run {{ name }} -profile --input samplesheet.csv --outdir " + fullParameter = "help_full" + showHiddenParameter = "show_hidden" + {% if is_nfcore -%} + beforeText = """ +-\033[2m----------------------------------------------------\033[0m- + \033[0;32m,--.\033[0;30m/\033[0;32m,-.\033[0m +\033[0;34m ___ __ __ __ ___ \033[0;32m/,-._.--~\'\033[0m +\033[0;34m |\\ | |__ __ / ` / \\ |__) |__ \033[0;33m} {\033[0m +\033[0;34m | \\| | \\__, \\__/ | \\ |___ \033[0;32m\\`-._,-`-,\033[0m + \033[0;32m`._,._,\'\033[0m +\033[0;35m {{ name }} ${manifest.version}\033[0m +-\033[2m----------------------------------------------------\033[0m- +""" + afterText = """${manifest.doi ? "\n* The pipeline\n" : ""}${manifest.doi.tokenize(",").collect { " https://doi.org/${it.trim().replace('https://doi.org/','')}"}.join("\n")}${manifest.doi ? "\n" : ""} +* The nf-core framework + https://doi.org/10.1038/s41587-020-0439-x + +* Software dependencies + https://github.com/{{ name }}/blob/master/CITATIONS.md +"""{% endif %} + }{% if is_nfcore %} + summary { + beforeText = validation.help.beforeText + afterText = validation.help.afterText + }{% endif %} } +{%- endif %} + +{% if modules -%} +// Load modules.config for DSL2 module specific options +includeConfig 'conf/modules.config' +{%- endif %} diff --git a/nf_core/pipeline-template/nextflow_schema.json b/nf_core/pipeline-template/nextflow_schema.json index 758689e89e..4136a0b490 100644 --- a/nf_core/pipeline-template/nextflow_schema.json +++ b/nf_core/pipeline-template/nextflow_schema.json @@ -1,10 +1,10 @@ { - "$schema": "http://json-schema.org/draft-07/schema", + "$schema": "https://json-schema.org/draft/2020-12/schema", "$id": "https://raw.githubusercontent.com/{{ name }}/master/nextflow_schema.json", "title": "{{ name }} pipeline parameters", "description": "{{ description }}", "type": "object", - "definitions": { + "$defs": { "input_output_options": { "title": "Input/output options", "type": "object", @@ -40,8 +40,7 @@ "type": "string", "description": "MultiQC report title. Printed as page header, used for filename if not otherwise specified.", "fa_icon": "fas fa-file-signature" - } - {% endif %} + }{% endif %} } }, {%- if igenomes %} @@ -57,6 +56,7 @@ "fa_icon": "fas fa-book", "help_text": "If using a reference genome configured in the pipeline using iGenomes, use this parameter to give the ID for the reference. This is then used to build the full paths for all required reference genome files e.g. `--genome GRCh38`. \n\nSee the [nf-core website docs](https://nf-co.re/usage/reference_genomes) for more details." }, + {%- if modules %} "fasta": { "type": "string", "format": "file-path", @@ -67,12 +67,21 @@ "help_text": "This parameter is *mandatory* if `--genome` is not specified. If you don't have a BWA index available this will be generated for you automatically. Combine with `--save_reference` to save BWA index for future runs.", "fa_icon": "far fa-file-code" }, + {%- endif %} "igenomes_ignore": { "type": "boolean", "description": "Do not load the iGenomes reference config.", "fa_icon": "fas fa-ban", "hidden": true, "help_text": "Do not load `igenomes.config` when running the pipeline. You may choose this option if you observe clashes between custom parameters and those supplied in `igenomes.config`." 
+ }, + "igenomes_base": { + "type": "string", + "format": "directory-path", + "description": "The base path to the igenomes reference files", + "fa_icon": "fas fa-ban", + "hidden": true, + "default": "s3://ngi-igenomes/igenomes/" } } }, @@ -127,41 +136,6 @@ } }, {%- endif %} - "max_job_request_options": { - "title": "Max job request options", - "type": "object", - "fa_icon": "fab fa-acquisitions-incorporated", - "description": "Set the top limit for requested resources for any single job.", - "help_text": "If you are running on a smaller system, a pipeline step requesting more resources than are available may cause the Nextflow to stop the run with an error. These options allow you to cap the maximum resources requested by any single job so that the pipeline will run on your system.\n\nNote that you can not _increase_ the resources requested by any job using these options. For that you will need your own configuration file. See [the nf-core website](https://nf-co.re/usage/configuration) for details.", - "properties": { - "max_cpus": { - "type": "integer", - "description": "Maximum number of CPUs that can be requested for any single job.", - "default": 16, - "fa_icon": "fas fa-microchip", - "hidden": true, - "help_text": "Use to set an upper-limit for the CPU requirement for each process. Should be an integer e.g. `--max_cpus 1`" - }, - "max_memory": { - "type": "string", - "description": "Maximum amount of memory that can be requested for any single job.", - "default": "128.GB", - "fa_icon": "fas fa-memory", - "pattern": "^\\d+(\\.\\d+)?\\.?\\s*(K|M|G|T)?B$", - "hidden": true, - "help_text": "Use to set an upper-limit for the memory requirement for each process. Should be a string in the format integer-unit e.g. `--max_memory '8.GB'`" - }, - "max_time": { - "type": "string", - "description": "Maximum amount of time that can be requested for any single job.", - "default": "240.h", - "fa_icon": "far fa-clock", - "pattern": "^(\\d+\\.?\\s*(s|m|h|d|day)\\s*)+$", - "hidden": true, - "help_text": "Use to set an upper-limit for the time requirement for each process. Should be a string in the format integer-unit e.g. 
`--max_time '2.h'`" - } - } - }, "generic_options": { "title": "Generic options", "type": "object", @@ -169,18 +143,13 @@ "description": "Less common options for the pipeline, typically set in a config file.", "help_text": "These options are common to all nf-core pipelines and allow you to customise some of the core preferences for how the pipeline runs.\n\nTypically these options would be set in a Nextflow config file loaded for all pipeline runs, such as `~/.nextflow/config`.", "properties": { - "help": { - "type": "boolean", - "description": "Display help text.", - "fa_icon": "fas fa-question-circle", - "hidden": true - }, "version": { "type": "boolean", "description": "Display version and exit.", "fa_icon": "fas fa-question-circle", "hidden": true }, + {%- if modules %} "publish_dir_mode": { "type": "string", "default": "copy", @@ -189,7 +158,7 @@ "fa_icon": "fas fa-copy", "enum": ["symlink", "rellink", "link", "copy", "copyNoFollow", "move"], "hidden": true - },{% if email %} + },{% endif %}{% if email %} "email_on_fail": { "type": "string", "description": "Email address for completion summary, only when pipeline fails.", @@ -212,21 +181,22 @@ "default": "25.MB", "fa_icon": "fas fa-file-upload", "hidden": true - }, - {% endif %} + },{% endif %} + {%- if modules %} "monochrome_logs": { "type": "boolean", "description": "Do not use coloured log outputs.", "fa_icon": "fas fa-palette", "hidden": true - }, + },{% endif %} + {%- if slackreport or adaptivecard %} "hook_url": { "type": "string", "description": "Incoming hook URL for messaging service", "fa_icon": "fas fa-people-group", "help_text": "Incoming hook URL for messaging service. Currently, MS Teams and Slack are supported.", "hidden": true - }, + },{% endif %} {%- if multiqc %} "multiqc_config": { "type": "string", @@ -252,27 +222,6 @@ "default": true, "fa_icon": "fas fa-check-square", "hidden": true - }, - "validationShowHiddenParams": { - "type": "boolean", - "fa_icon": "far fa-eye-slash", - "description": "Show all params when using `--help`", - "hidden": true, - "help_text": "By default, parameters set as _hidden_ in the schema are not shown on the command line when a user runs with `--help`. Specifying this option will tell the pipeline to show all parameters." - }, - "validationFailUnrecognisedParams": { - "type": "boolean", - "fa_icon": "far fa-check-circle", - "description": "Validation of parameters fails when an unrecognised parameter is found.", - "hidden": true, - "help_text": "By default, when an unrecognised parameter is found, it returns a warinig." - }, - "validationLenientMode": { - "type": "boolean", - "fa_icon": "far fa-check-circle", - "description": "Validation of parameters in lenient more.", - "hidden": true, - "help_text": "Allows string values that are parseable as numbers or booleans. For further information see [JSONSchema docs](https://github.com/everit-org/json-schema#lenient-mode)." 
}{% if test_config %}, "pipelines_testdata_base_path": { "type": "string", @@ -286,19 +235,16 @@ }, "allOf": [ { - "$ref": "#/definitions/input_output_options" + "$ref": "#/$defs/input_output_options" }, {% if igenomes %}{ - "$ref": "#/definitions/reference_genome_options" + "$ref": "#/$defs/reference_genome_options" },{% endif %} {% if nf_core_configs %}{ - "$ref": "#/definitions/institutional_config_options" + "$ref": "#/$defs/institutional_config_options" },{% endif %} { - "$ref": "#/definitions/max_job_request_options" - }, - { - "$ref": "#/definitions/generic_options" + "$ref": "#/$defs/generic_options" } ] } diff --git a/nf_core/pipeline-template/subworkflows/local/utils_nfcore_pipeline_pipeline/main.nf b/nf_core/pipeline-template/subworkflows/local/utils_nfcore_pipeline_pipeline/main.nf index b130b7f88a..78fed1fcf6 100644 --- a/nf_core/pipeline-template/subworkflows/local/utils_nfcore_pipeline_pipeline/main.nf +++ b/nf_core/pipeline-template/subworkflows/local/utils_nfcore_pipeline_pipeline/main.nf @@ -8,33 +8,29 @@ ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ */ -include { UTILS_NFVALIDATION_PLUGIN } from '../../nf-core/utils_nfvalidation_plugin' -include { paramsSummaryMap } from 'plugin/nf-validation' -include { fromSamplesheet } from 'plugin/nf-validation' -include { UTILS_NEXTFLOW_PIPELINE } from '../../nf-core/utils_nextflow_pipeline' +{% if nf_schema %}include { UTILS_NFSCHEMA_PLUGIN } from '../../nf-core/utils_nfschema_plugin' +include { paramsSummaryMap } from 'plugin/nf-schema' +include { samplesheetToList } from 'plugin/nf-schema'{% endif %} {%- if email %} include { completionEmail } from '../../nf-core/utils_nfcore_pipeline' {%- endif %} include { completionSummary } from '../../nf-core/utils_nfcore_pipeline' -include { dashedLine } from '../../nf-core/utils_nfcore_pipeline' -include { nfCoreLogo } from '../../nf-core/utils_nfcore_pipeline' {%- if adaptivecard or slackreport %} include { imNotification } from '../../nf-core/utils_nfcore_pipeline' {%- endif %} include { UTILS_NFCORE_PIPELINE } from '../../nf-core/utils_nfcore_pipeline' -include { workflowCitation } from '../../nf-core/utils_nfcore_pipeline' +include { UTILS_NEXTFLOW_PIPELINE } from '../../nf-core/utils_nextflow_pipeline' /* -======================================================================================== +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ SUBWORKFLOW TO INITIALISE PIPELINE -======================================================================================== +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ */ workflow PIPELINE_INITIALISATION { take: version // boolean: Display version and exit - help // boolean: Display help text validate_params // boolean: Boolean whether to validate parameters against the schema at runtime monochrome_logs // boolean: Do not use coloured log outputs nextflow_cli_args // array: List of positional nextflow CLI args @@ -55,20 +51,17 @@ workflow PIPELINE_INITIALISATION { workflow.profile.tokenize(',').intersect(['conda', 'mamba']).size() >= 1 ) + {%- if nf_schema %} + // // Validate parameters and generate parameter summary to stdout // - pre_help_text = nfCoreLogo(monochrome_logs) - post_help_text = '\n' + workflowCitation() + '\n' + dashedLine(monochrome_logs) - def String workflow_command = "nextflow run ${workflow.manifest.name} -profile --input samplesheet.csv --outdir " - UTILS_NFVALIDATION_PLUGIN ( - help, - 
workflow_command, - pre_help_text, - post_help_text, + UTILS_NFSCHEMA_PLUGIN ( + workflow, validate_params, - "nextflow_schema.json" + null ) + {%- endif %} // // Check config provided to the pipeline @@ -78,6 +71,7 @@ workflow PIPELINE_INITIALISATION { ) {%- if igenomes %} + // // Custom validation for pipeline parameters // @@ -87,8 +81,14 @@ workflow PIPELINE_INITIALISATION { // // Create channel from input file provided through params.input // - Channel - .fromSamplesheet("input") + + Channel{% if nf_schema %} + .fromList(samplesheetToList(params.input, "${projectDir}/assets/schema_input.json")){% else %} + .fromPath(params.input) + .splitCsv(header: true, strip: true) + .map { row -> + [[id:row.sample], row.fastq_1, row.fastq_2] + }{% endif %} .map { meta, fastq_1, fastq_2 -> if (!fastq_2) { @@ -98,8 +98,8 @@ workflow PIPELINE_INITIALISATION { } } .groupTuple() - .map { - validateInputSamplesheet(it) + .map { samplesheet -> + validateInputSamplesheet(samplesheet) } .map { meta, fastqs -> @@ -113,9 +113,9 @@ workflow PIPELINE_INITIALISATION { } /* -======================================================================================== +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ SUBWORKFLOW FOR PIPELINE COMPLETION -======================================================================================== +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ */ workflow PIPELINE_COMPLETION { @@ -125,15 +125,18 @@ workflow PIPELINE_COMPLETION { email // string: email address email_on_fail // string: email address sent on pipeline failure plaintext_email // boolean: Send plain-text email instead of HTML - {% endif %} + {%- endif %} outdir // path: Path to output directory where results will be published monochrome_logs // boolean: Disable ANSI colour codes in log output {% if adaptivecard or slackreport %}hook_url // string: hook URL for notifications{% endif %} {% if multiqc %}multiqc_report // string: Path to MultiQC report{% endif %} main: - + {%- if nf_schema %} summary_params = paramsSummaryMap(workflow, parameters_schema: "nextflow_schema.json") + {%- else %} + summary_params = [:] + {%- endif %} // // Completion email and summary @@ -141,11 +144,15 @@ workflow PIPELINE_COMPLETION { workflow.onComplete { {%- if email %} if (email || email_on_fail) { - {%- if multiqc %} - completionEmail(summary_params, email, email_on_fail, plaintext_email, outdir, monochrome_logs, multiqc_report.toList()) - {%- else %} - completionEmail(summary_params, email, email_on_fail, plaintext_email, outdir, monochrome_logs, []) - {%- endif %} + completionEmail( + summary_params, + email, + email_on_fail, + plaintext_email, + outdir, + monochrome_logs, + {% if multiqc %}multiqc_report.toList(){% else %}[]{% endif %} + ) } {%- endif %} @@ -164,9 +171,9 @@ workflow PIPELINE_COMPLETION { } /* -======================================================================================== +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ FUNCTIONS -======================================================================================== +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ */ {%- if igenomes %} @@ -185,7 +192,7 @@ def validateInputSamplesheet(input) { def (metas, fastqs) = input[1..2] // Check that multiple runs of the same sample are of the same datatype i.e. 
single-end / paired-end - def endedness_ok = metas.collect{ it.single_end }.unique().size == 1 + def endedness_ok = metas.collect{ meta -> meta.single_end }.unique().size == 1 if (!endedness_ok) { error("Please check input samplesheet -> Multiple runs of a sample must be of the same datatype i.e. single-end or paired-end: ${metas[0].id}") } @@ -262,8 +269,10 @@ def methodsDescriptionText(mqc_methods_yaml) { // Removing `https://doi.org/` to handle pipelines using DOIs vs DOI resolvers // Removing ` ` since the manifest.doi is a string and not a proper list def temp_doi_ref = "" - String[] manifest_doi = meta.manifest_map.doi.tokenize(",") - for (String doi_ref: manifest_doi) temp_doi_ref += "(doi: ${doi_ref.replace("https://doi.org/", "").replace(" ", "")}), " + def manifest_doi = meta.manifest_map.doi.tokenize(",") + manifest_doi.each { doi_ref -> + temp_doi_ref += "(doi: ${doi_ref.replace("https://doi.org/", "").replace(" ", "")}), " + } meta["doi_text"] = temp_doi_ref.substring(0, temp_doi_ref.length() - 2) } else meta["doi_text"] = "" meta["nodoi_text"] = meta.manifest_map.doi ? "" : "
<li>If available, make sure to update the text to include the Zenodo DOI of version of the pipeline used. </li>
  • " diff --git a/nf_core/pipeline-template/subworkflows/nf-core/utils_nextflow_pipeline/main.nf b/nf_core/pipeline-template/subworkflows/nf-core/utils_nextflow_pipeline/main.nf index e770d91b97..0fcbf7b3f2 100644 --- a/nf_core/pipeline-template/subworkflows/nf-core/utils_nextflow_pipeline/main.nf +++ b/nf_core/pipeline-template/subworkflows/nf-core/utils_nextflow_pipeline/main.nf @@ -2,18 +2,13 @@ // Subworkflow with functionality that may be useful for any Nextflow pipeline // -import org.yaml.snakeyaml.Yaml -import groovy.json.JsonOutput -import nextflow.extension.FilesEx - /* -======================================================================================== +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ SUBWORKFLOW DEFINITION -======================================================================================== +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ */ workflow UTILS_NEXTFLOW_PIPELINE { - take: print_version // boolean: print version dump_parameters // boolean: dump parameters @@ -26,7 +21,7 @@ workflow UTILS_NEXTFLOW_PIPELINE { // Print workflow version and exit on --version // if (print_version) { - log.info "${workflow.manifest.name} ${getWorkflowVersion()}" + log.info("${workflow.manifest.name} ${getWorkflowVersion()}") System.exit(0) } @@ -49,16 +44,16 @@ workflow UTILS_NEXTFLOW_PIPELINE { } /* -======================================================================================== +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ FUNCTIONS -======================================================================================== +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ */ // // Generate version string // def getWorkflowVersion() { - String version_string = "" + def version_string = "" as String if (workflow.manifest.version) { def prefix_v = workflow.manifest.version[0] != 'v' ? 'v' : '' version_string += "${prefix_v}${workflow.manifest.version}" @@ -76,13 +71,13 @@ def getWorkflowVersion() { // Dump pipeline parameters to a JSON file // def dumpParametersToJSON(outdir) { - def timestamp = new java.util.Date().format( 'yyyy-MM-dd_HH-mm-ss') - def filename = "params_${timestamp}.json" - def temp_pf = new File(workflow.launchDir.toString(), ".${filename}") - def jsonStr = JsonOutput.toJson(params) - temp_pf.text = JsonOutput.prettyPrint(jsonStr) + def timestamp = new java.util.Date().format('yyyy-MM-dd_HH-mm-ss') + def filename = "params_${timestamp}.json" + def temp_pf = new File(workflow.launchDir.toString(), ".${filename}") + def jsonStr = groovy.json.JsonOutput.toJson(params) + temp_pf.text = groovy.json.JsonOutput.prettyPrint(jsonStr) - FilesEx.copyTo(temp_pf.toPath(), "${outdir}/pipeline_info/params_${timestamp}.json") + nextflow.extension.FilesEx.copyTo(temp_pf.toPath(), "${outdir}/pipeline_info/params_${timestamp}.json") temp_pf.delete() } @@ -90,14 +85,19 @@ def dumpParametersToJSON(outdir) { // When running with -profile conda, warn if channels have not been set-up appropriately // def checkCondaChannels() { - Yaml parser = new Yaml() + def parser = new org.yaml.snakeyaml.Yaml() def channels = [] try { def config = parser.load("conda config --show channels".execute().text) channels = config.channels - } catch(NullPointerException | IOException e) { - log.warn "Could not verify conda channel configuration." 
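// ----------------------------------------------------------------------------------------
// Aside: the checkCondaChannels() refactor in this hunk replaces the old index-comparison
// loop with a single findAll equality test. A worked example, assuming the required order
// is ['conda-forge', 'bioconda']: if `conda config --show channels` reports
//
//     channels = ['defaults', 'bioconda', 'conda-forge']
//
// then channels.findAll { ch -> ch in required_channels_in_order } keeps the observed
// order and yields ['bioconda', 'conda-forge'], which differs from the required list, so
// channel_priority_violation is true, the same outcome the removed loop produced.
// ----------------------------------------------------------------------------------------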
- return + } + catch (NullPointerException e) { + log.warn("Could not verify conda channel configuration.") + return null + } + catch (IOException e) { + log.warn("Could not verify conda channel configuration.") + return null } // Check that all channels are present @@ -106,21 +106,19 @@ def checkCondaChannels() { def channels_missing = ((required_channels_in_order as Set) - (channels as Set)) as Boolean // Check that they are in the right order - def channel_priority_violation = false - def n = required_channels_in_order.size() - for (int i = 0; i < n - 1; i++) { - channel_priority_violation |= !(channels.indexOf(required_channels_in_order[i]) < channels.indexOf(required_channels_in_order[i+1])) - } + def channel_priority_violation = required_channels_in_order != channels.findAll { ch -> ch in required_channels_in_order } if (channels_missing | channel_priority_violation) { - log.warn "~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~\n" + - " There is a problem with your Conda configuration!\n\n" + - " You will need to set-up the conda-forge and bioconda channels correctly.\n" + - " Please refer to https://bioconda.github.io/\n" + - " The observed channel order is \n" + - " ${channels}\n" + - " but the following channel order is required:\n" + - " ${required_channels_in_order}\n" + - "~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~" + log.warn """\ + ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + There is a problem with your Conda configuration! + You will need to set-up the conda-forge and bioconda channels correctly. + Please refer to https://bioconda.github.io/ + The observed channel order is + ${channels} + but the following channel order is required: + ${required_channels_in_order} + ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~" + """.stripIndent(true) } } diff --git a/nf_core/pipeline-template/subworkflows/nf-core/utils_nextflow_pipeline/tests/nextflow.config b/nf_core/pipeline-template/subworkflows/nf-core/utils_nextflow_pipeline/tests/nextflow.config index d0a926bf6d..a09572e5bb 100644 --- a/nf_core/pipeline-template/subworkflows/nf-core/utils_nextflow_pipeline/tests/nextflow.config +++ b/nf_core/pipeline-template/subworkflows/nf-core/utils_nextflow_pipeline/tests/nextflow.config @@ -3,7 +3,7 @@ manifest { author = """nf-core""" homePage = 'https://127.0.0.1' description = """Dummy pipeline""" - nextflowVersion = '!>=23.04.0' + nextflowVersion = '!>=23.04.0' version = '9.9.9' doi = 'https://doi.org/10.5281/zenodo.5070524' } diff --git a/nf_core/pipeline-template/subworkflows/nf-core/utils_nfcore_pipeline/main.nf b/nf_core/pipeline-template/subworkflows/nf-core/utils_nfcore_pipeline/main.nf index 14558c3927..5cb7bafef3 100644 --- a/nf_core/pipeline-template/subworkflows/nf-core/utils_nfcore_pipeline/main.nf +++ b/nf_core/pipeline-template/subworkflows/nf-core/utils_nfcore_pipeline/main.nf @@ -2,17 +2,13 @@ // Subworkflow with utility functions specific to the nf-core pipeline template // -import org.yaml.snakeyaml.Yaml -import nextflow.extension.FilesEx - /* -======================================================================================== +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ SUBWORKFLOW DEFINITION -======================================================================================== +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ */ workflow 
UTILS_NFCORE_PIPELINE { - take: nextflow_cli_args @@ -25,23 +21,20 @@ workflow UTILS_NFCORE_PIPELINE { } /* -======================================================================================== +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ FUNCTIONS -======================================================================================== +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ */ // // Warn if a -profile or Nextflow config has not been provided to run the pipeline // def checkConfigProvided() { - valid_config = true + def valid_config = true as Boolean if (workflow.profile == 'standard' && workflow.configFiles.size() <= 1) { - log.warn "[$workflow.manifest.name] You are attempting to run the pipeline without any custom configuration!\n\n" + - "This will be dependent on your local compute environment but can be achieved via one or more of the following:\n" + - " (1) Using an existing pipeline profile e.g. `-profile docker` or `-profile singularity`\n" + - " (2) Using an existing nf-core/configs for your Institution e.g. `-profile crick` or `-profile uppmax`\n" + - " (3) Using your own local custom config e.g. `-c /path/to/your/custom.config`\n\n" + - "Please refer to the quick start section and usage docs for the pipeline.\n " + log.warn( + "[${workflow.manifest.name}] You are attempting to run the pipeline without any custom configuration!\n\n" + "This will be dependent on your local compute environment but can be achieved via one or more of the following:\n" + " (1) Using an existing pipeline profile e.g. `-profile docker` or `-profile singularity`\n" + " (2) Using an existing nf-core/configs for your Institution e.g. `-profile crick` or `-profile uppmax`\n" + " (3) Using your own local custom config e.g. `-c /path/to/your/custom.config`\n\n" + "Please refer to the quick start section and usage docs for the pipeline.\n " + ) valid_config = false } return valid_config @@ -52,12 +45,14 @@ def checkConfigProvided() { // def checkProfileProvided(nextflow_cli_args) { if (workflow.profile.endsWith(',')) { - error "The `-profile` option cannot end with a trailing comma, please remove it and re-run the pipeline!\n" + - "HINT: A common mistake is to provide multiple values separated by spaces e.g. `-profile test, docker`.\n" + error( + "The `-profile` option cannot end with a trailing comma, please remove it and re-run the pipeline!\n" + "HINT: A common mistake is to provide multiple values separated by spaces e.g. `-profile test, docker`.\n" + ) } if (nextflow_cli_args[0]) { - log.warn "nf-core pipelines do not accept positional arguments. The positional argument `${nextflow_cli_args[0]}` has been detected.\n" + - "HINT: A common mistake is to provide multiple values separated by spaces e.g. `-profile test, docker`.\n" + log.warn( + "nf-core pipelines do not accept positional arguments. The positional argument `${nextflow_cli_args[0]}` has been detected.\n" + "HINT: A common mistake is to provide multiple values separated by spaces e.g. 
`-profile test, docker`.\n" + ) } } @@ -66,25 +61,21 @@ def checkProfileProvided(nextflow_cli_args) { // def workflowCitation() { def temp_doi_ref = "" - String[] manifest_doi = workflow.manifest.doi.tokenize(",") - // Using a loop to handle multiple DOIs + def manifest_doi = workflow.manifest.doi.tokenize(",") + // Handling multiple DOIs // Removing `https://doi.org/` to handle pipelines using DOIs vs DOI resolvers // Removing ` ` since the manifest.doi is a string and not a proper list - for (String doi_ref: manifest_doi) temp_doi_ref += " https://doi.org/${doi_ref.replace('https://doi.org/', '').replace(' ', '')}\n" - return "If you use ${workflow.manifest.name} for your analysis please cite:\n\n" + - "* The pipeline\n" + - temp_doi_ref + "\n" + - "* The nf-core framework\n" + - " https://doi.org/10.1038/s41587-020-0439-x\n\n" + - "* Software dependencies\n" + - " https://github.com/${workflow.manifest.name}/blob/master/CITATIONS.md" + manifest_doi.each { doi_ref -> + temp_doi_ref += " https://doi.org/${doi_ref.replace('https://doi.org/', '').replace(' ', '')}\n" + } + return "If you use ${workflow.manifest.name} for your analysis please cite:\n\n" + "* The pipeline\n" + temp_doi_ref + "\n" + "* The nf-core framework\n" + " https://doi.org/10.1038/s41587-020-0439-x\n\n" + "* Software dependencies\n" + " https://github.com/${workflow.manifest.name}/blob/master/CITATIONS.md" } // // Generate workflow version string // def getWorkflowVersion() { - String version_string = "" + def version_string = "" as String if (workflow.manifest.version) { def prefix_v = workflow.manifest.version[0] != 'v' ? 'v' : '' version_string += "${prefix_v}${workflow.manifest.version}" @@ -102,8 +93,8 @@ def getWorkflowVersion() { // Get software versions for pipeline // def processVersionsFromYAML(yaml_file) { - Yaml yaml = new Yaml() - versions = yaml.load(yaml_file).collectEntries { k, v -> [ k.tokenize(':')[-1], v ] } + def yaml = new org.yaml.snakeyaml.Yaml() + def versions = yaml.load(yaml_file).collectEntries { k, v -> [k.tokenize(':')[-1], v] } return yaml.dumpAsMap(versions).trim() } @@ -113,8 +104,8 @@ def processVersionsFromYAML(yaml_file) { def workflowVersionToYAML() { return """ Workflow: - $workflow.manifest.name: ${getWorkflowVersion()} - Nextflow: $workflow.nextflow.version + ${workflow.manifest.name}: ${getWorkflowVersion()} + Nextflow: ${workflow.nextflow.version} """.stripIndent().trim() } @@ -122,11 +113,7 @@ def workflowVersionToYAML() { // Get channel of software versions used in pipeline in YAML format // def softwareVersionsToYAML(ch_versions) { - return ch_versions - .unique() - .map { processVersionsFromYAML(it) } - .unique() - .mix(Channel.of(workflowVersionToYAML())) + return ch_versions.unique().map { version -> processVersionsFromYAML(version) }.unique().mix(Channel.of(workflowVersionToYAML())) } // @@ -134,25 +121,31 @@ def softwareVersionsToYAML(ch_versions) { // def paramsSummaryMultiqc(summary_params) { def summary_section = '' - for (group in summary_params.keySet()) { - def group_params = summary_params.get(group) // This gets the parameters of that particular group - if (group_params) { - summary_section += "
    <p style=\"font-size:110%\"><b>$group</b></p>\n" - summary_section += "    <dl class=\"dl-horizontal\">\n" - for (param in group_params.keySet()) { - summary_section += "        <dt>$param</dt><dd><samp>${group_params.get(param) ?: '<span style=\"color:#999999;\">N/A</a>'}</samp></dd>\n" + summary_params + .keySet() + .each { group -> + def group_params = summary_params.get(group) + // This gets the parameters of that particular group + if (group_params) { + summary_section += "    <p style=\"font-size:110%\"><b>${group}</b></p>\n" + summary_section += "    <dl class=\"dl-horizontal\">\n" + group_params + .keySet() + .sort() + .each { param -> + summary_section += "        <dt>${param}</dt><dd><samp>${group_params.get(param) ?: '<span style=\"color:#999999;\">N/A</a>'}</samp></dd>\n" + } + summary_section += "    </dl>\n" } - summary_section += "    </dl>
    \n" } - } - String yaml_file_text = "id: '${workflow.manifest.name.replace('/','-')}-summary'\n" - yaml_file_text += "description: ' - this information is collected when the pipeline is started.'\n" - yaml_file_text += "section_name: '${workflow.manifest.name} Workflow Summary'\n" - yaml_file_text += "section_href: 'https://github.com/${workflow.manifest.name}'\n" - yaml_file_text += "plot_type: 'html'\n" - yaml_file_text += "data: |\n" - yaml_file_text += "${summary_section}" + def yaml_file_text = "id: '${workflow.manifest.name.replace('/', '-')}-summary'\n" as String + yaml_file_text += "description: ' - this information is collected when the pipeline is started.'\n" + yaml_file_text += "section_name: '${workflow.manifest.name} Workflow Summary'\n" + yaml_file_text += "section_href: 'https://github.com/${workflow.manifest.name}'\n" + yaml_file_text += "plot_type: 'html'\n" + yaml_file_text += "data: |\n" + yaml_file_text += "${summary_section}" return yaml_file_text } @@ -161,7 +154,7 @@ def paramsSummaryMultiqc(summary_params) { // nf-core logo // def nfCoreLogo(monochrome_logs=true) { - Map colors = logColours(monochrome_logs) + def colors = logColours(monochrome_logs) as Map String.format( """\n ${dashedLine(monochrome_logs)} @@ -180,7 +173,7 @@ def nfCoreLogo(monochrome_logs=true) { // Return dashed line // def dashedLine(monochrome_logs=true) { - Map colors = logColours(monochrome_logs) + def colors = logColours(monochrome_logs) as Map return "-${colors.dim}----------------------------------------------------${colors.reset}-" } @@ -188,7 +181,7 @@ def dashedLine(monochrome_logs=true) { // ANSII colours used for terminal logging // def logColours(monochrome_logs=true) { - Map colorcodes = [:] + def colorcodes = [:] as Map // Reset / Meta colorcodes['reset'] = monochrome_logs ? '' : "\033[0m" @@ -200,54 +193,54 @@ def logColours(monochrome_logs=true) { colorcodes['hidden'] = monochrome_logs ? '' : "\033[8m" // Regular Colors - colorcodes['black'] = monochrome_logs ? '' : "\033[0;30m" - colorcodes['red'] = monochrome_logs ? '' : "\033[0;31m" - colorcodes['green'] = monochrome_logs ? '' : "\033[0;32m" - colorcodes['yellow'] = monochrome_logs ? '' : "\033[0;33m" - colorcodes['blue'] = monochrome_logs ? '' : "\033[0;34m" - colorcodes['purple'] = monochrome_logs ? '' : "\033[0;35m" - colorcodes['cyan'] = monochrome_logs ? '' : "\033[0;36m" - colorcodes['white'] = monochrome_logs ? '' : "\033[0;37m" + colorcodes['black'] = monochrome_logs ? '' : "\033[0;30m" + colorcodes['red'] = monochrome_logs ? '' : "\033[0;31m" + colorcodes['green'] = monochrome_logs ? '' : "\033[0;32m" + colorcodes['yellow'] = monochrome_logs ? '' : "\033[0;33m" + colorcodes['blue'] = monochrome_logs ? '' : "\033[0;34m" + colorcodes['purple'] = monochrome_logs ? '' : "\033[0;35m" + colorcodes['cyan'] = monochrome_logs ? '' : "\033[0;36m" + colorcodes['white'] = monochrome_logs ? '' : "\033[0;37m" // Bold - colorcodes['bblack'] = monochrome_logs ? '' : "\033[1;30m" - colorcodes['bred'] = monochrome_logs ? '' : "\033[1;31m" - colorcodes['bgreen'] = monochrome_logs ? '' : "\033[1;32m" - colorcodes['byellow'] = monochrome_logs ? '' : "\033[1;33m" - colorcodes['bblue'] = monochrome_logs ? '' : "\033[1;34m" - colorcodes['bpurple'] = monochrome_logs ? '' : "\033[1;35m" - colorcodes['bcyan'] = monochrome_logs ? '' : "\033[1;36m" - colorcodes['bwhite'] = monochrome_logs ? '' : "\033[1;37m" + colorcodes['bblack'] = monochrome_logs ? '' : "\033[1;30m" + colorcodes['bred'] = monochrome_logs ? 
'' : "\033[1;31m" + colorcodes['bgreen'] = monochrome_logs ? '' : "\033[1;32m" + colorcodes['byellow'] = monochrome_logs ? '' : "\033[1;33m" + colorcodes['bblue'] = monochrome_logs ? '' : "\033[1;34m" + colorcodes['bpurple'] = monochrome_logs ? '' : "\033[1;35m" + colorcodes['bcyan'] = monochrome_logs ? '' : "\033[1;36m" + colorcodes['bwhite'] = monochrome_logs ? '' : "\033[1;37m" // Underline - colorcodes['ublack'] = monochrome_logs ? '' : "\033[4;30m" - colorcodes['ured'] = monochrome_logs ? '' : "\033[4;31m" - colorcodes['ugreen'] = monochrome_logs ? '' : "\033[4;32m" - colorcodes['uyellow'] = monochrome_logs ? '' : "\033[4;33m" - colorcodes['ublue'] = monochrome_logs ? '' : "\033[4;34m" - colorcodes['upurple'] = monochrome_logs ? '' : "\033[4;35m" - colorcodes['ucyan'] = monochrome_logs ? '' : "\033[4;36m" - colorcodes['uwhite'] = monochrome_logs ? '' : "\033[4;37m" + colorcodes['ublack'] = monochrome_logs ? '' : "\033[4;30m" + colorcodes['ured'] = monochrome_logs ? '' : "\033[4;31m" + colorcodes['ugreen'] = monochrome_logs ? '' : "\033[4;32m" + colorcodes['uyellow'] = monochrome_logs ? '' : "\033[4;33m" + colorcodes['ublue'] = monochrome_logs ? '' : "\033[4;34m" + colorcodes['upurple'] = monochrome_logs ? '' : "\033[4;35m" + colorcodes['ucyan'] = monochrome_logs ? '' : "\033[4;36m" + colorcodes['uwhite'] = monochrome_logs ? '' : "\033[4;37m" // High Intensity - colorcodes['iblack'] = monochrome_logs ? '' : "\033[0;90m" - colorcodes['ired'] = monochrome_logs ? '' : "\033[0;91m" - colorcodes['igreen'] = monochrome_logs ? '' : "\033[0;92m" - colorcodes['iyellow'] = monochrome_logs ? '' : "\033[0;93m" - colorcodes['iblue'] = monochrome_logs ? '' : "\033[0;94m" - colorcodes['ipurple'] = monochrome_logs ? '' : "\033[0;95m" - colorcodes['icyan'] = monochrome_logs ? '' : "\033[0;96m" - colorcodes['iwhite'] = monochrome_logs ? '' : "\033[0;97m" + colorcodes['iblack'] = monochrome_logs ? '' : "\033[0;90m" + colorcodes['ired'] = monochrome_logs ? '' : "\033[0;91m" + colorcodes['igreen'] = monochrome_logs ? '' : "\033[0;92m" + colorcodes['iyellow'] = monochrome_logs ? '' : "\033[0;93m" + colorcodes['iblue'] = monochrome_logs ? '' : "\033[0;94m" + colorcodes['ipurple'] = monochrome_logs ? '' : "\033[0;95m" + colorcodes['icyan'] = monochrome_logs ? '' : "\033[0;96m" + colorcodes['iwhite'] = monochrome_logs ? '' : "\033[0;97m" // Bold High Intensity - colorcodes['biblack'] = monochrome_logs ? '' : "\033[1;90m" - colorcodes['bired'] = monochrome_logs ? '' : "\033[1;91m" - colorcodes['bigreen'] = monochrome_logs ? '' : "\033[1;92m" - colorcodes['biyellow'] = monochrome_logs ? '' : "\033[1;93m" - colorcodes['biblue'] = monochrome_logs ? '' : "\033[1;94m" - colorcodes['bipurple'] = monochrome_logs ? '' : "\033[1;95m" - colorcodes['bicyan'] = monochrome_logs ? '' : "\033[1;96m" - colorcodes['biwhite'] = monochrome_logs ? '' : "\033[1;97m" + colorcodes['biblack'] = monochrome_logs ? '' : "\033[1;90m" + colorcodes['bired'] = monochrome_logs ? '' : "\033[1;91m" + colorcodes['bigreen'] = monochrome_logs ? '' : "\033[1;92m" + colorcodes['biyellow'] = monochrome_logs ? '' : "\033[1;93m" + colorcodes['biblue'] = monochrome_logs ? '' : "\033[1;94m" + colorcodes['bipurple'] = monochrome_logs ? '' : "\033[1;95m" + colorcodes['bicyan'] = monochrome_logs ? '' : "\033[1;96m" + colorcodes['biwhite'] = monochrome_logs ? 
'' : "\033[1;97m" return colorcodes } @@ -262,14 +255,15 @@ def attachMultiqcReport(multiqc_report) { mqc_report = multiqc_report.getVal() if (mqc_report.getClass() == ArrayList && mqc_report.size() >= 1) { if (mqc_report.size() > 1) { - log.warn "[$workflow.manifest.name] Found multiple reports from process 'MULTIQC', will use only one" + log.warn("[${workflow.manifest.name}] Found multiple reports from process 'MULTIQC', will use only one") } mqc_report = mqc_report[0] } } - } catch (all) { + } + catch (Exception all) { if (multiqc_report) { - log.warn "[$workflow.manifest.name] Could not attach MultiQC report to summary email" + log.warn("[${workflow.manifest.name}] Could not attach MultiQC report to summary email") } } return mqc_report @@ -281,26 +275,35 @@ def attachMultiqcReport(multiqc_report) { def completionEmail(summary_params, email, email_on_fail, plaintext_email, outdir, monochrome_logs=true, multiqc_report=null) { // Set up the e-mail variables - def subject = "[$workflow.manifest.name] Successful: $workflow.runName" + def subject = "[${workflow.manifest.name}] Successful: ${workflow.runName}" if (!workflow.success) { - subject = "[$workflow.manifest.name] FAILED: $workflow.runName" + subject = "[${workflow.manifest.name}] FAILED: ${workflow.runName}" } def summary = [:] - for (group in summary_params.keySet()) { - summary << summary_params[group] - } + summary_params + .keySet() + .sort() + .each { group -> + summary << summary_params[group] + } def misc_fields = [:] misc_fields['Date Started'] = workflow.start misc_fields['Date Completed'] = workflow.complete misc_fields['Pipeline script file path'] = workflow.scriptFile misc_fields['Pipeline script hash ID'] = workflow.scriptId - if (workflow.repository) misc_fields['Pipeline repository Git URL'] = workflow.repository - if (workflow.commitId) misc_fields['Pipeline repository Git Commit'] = workflow.commitId - if (workflow.revision) misc_fields['Pipeline Git branch/tag'] = workflow.revision - misc_fields['Nextflow Version'] = workflow.nextflow.version - misc_fields['Nextflow Build'] = workflow.nextflow.build + if (workflow.repository) { + misc_fields['Pipeline repository Git URL'] = workflow.repository + } + if (workflow.commitId) { + misc_fields['Pipeline repository Git Commit'] = workflow.commitId + } + if (workflow.revision) { + misc_fields['Pipeline Git branch/tag'] = workflow.revision + } + misc_fields['Nextflow Version'] = workflow.nextflow.version + misc_fields['Nextflow Build'] = workflow.nextflow.build misc_fields['Nextflow Compile Timestamp'] = workflow.nextflow.timestamp def email_fields = [:] @@ -338,39 +341,41 @@ def completionEmail(summary_params, email, email_on_fail, plaintext_email, outdi // Render the sendmail template def max_multiqc_email_size = (params.containsKey('max_multiqc_email_size') ? 
params.max_multiqc_email_size : 0) as nextflow.util.MemoryUnit - def smail_fields = [ email: email_address, subject: subject, email_txt: email_txt, email_html: email_html, projectDir: "${workflow.projectDir}", mqcFile: mqc_report, mqcMaxSize: max_multiqc_email_size.toBytes() ] + def smail_fields = [email: email_address, subject: subject, email_txt: email_txt, email_html: email_html, projectDir: "${workflow.projectDir}", mqcFile: mqc_report, mqcMaxSize: max_multiqc_email_size.toBytes()] def sf = new File("${workflow.projectDir}/assets/sendmail_template.txt") def sendmail_template = engine.createTemplate(sf).make(smail_fields) def sendmail_html = sendmail_template.toString() // Send the HTML e-mail - Map colors = logColours(monochrome_logs) + def colors = logColours(monochrome_logs) as Map if (email_address) { try { - if (plaintext_email) { throw GroovyException('Send plaintext e-mail, not HTML') } + if (plaintext_email) { + throw new org.codehaus.groovy.GroovyException('Send plaintext e-mail, not HTML') + } // Try to send HTML e-mail using sendmail def sendmail_tf = new File(workflow.launchDir.toString(), ".sendmail_tmp.html") sendmail_tf.withWriter { w -> w << sendmail_html } - [ 'sendmail', '-t' ].execute() << sendmail_html - log.info "-${colors.purple}[$workflow.manifest.name]${colors.green} Sent summary e-mail to $email_address (sendmail)-" - } catch (all) { + ['sendmail', '-t'].execute() << sendmail_html + log.info("-${colors.purple}[${workflow.manifest.name}]${colors.green} Sent summary e-mail to ${email_address} (sendmail)-") + } + catch (Exception all) { // Catch failures and try with plaintext - def mail_cmd = [ 'mail', '-s', subject, '--content-type=text/html', email_address ] + def mail_cmd = ['mail', '-s', subject, '--content-type=text/html', email_address] mail_cmd.execute() << email_html - log.info "-${colors.purple}[$workflow.manifest.name]${colors.green} Sent summary e-mail to $email_address (mail)-" + log.info("-${colors.purple}[${workflow.manifest.name}]${colors.green} Sent summary e-mail to ${email_address} (mail)-") } } // Write summary e-mail HTML to a file def output_hf = new File(workflow.launchDir.toString(), ".pipeline_report.html") output_hf.withWriter { w -> w << email_html } - FilesEx.copyTo(output_hf.toPath(), "${outdir}/pipeline_info/pipeline_report.html"); + nextflow.extension.FilesEx.copyTo(output_hf.toPath(), "${outdir}/pipeline_info/pipeline_report.html") output_hf.delete() // Write summary e-mail TXT to a file def output_tf = new File(workflow.launchDir.toString(), ".pipeline_report.txt") output_tf.withWriter { w -> w << email_txt } - FilesEx.copyTo(output_tf.toPath(), "${outdir}/pipeline_info/pipeline_report.txt"); + nextflow.extension.FilesEx.copyTo(output_tf.toPath(), "${outdir}/pipeline_info/pipeline_report.txt") output_tf.delete() } @@ -378,15 +383,17 @@ def completionEmail(summary_params, email, email_on_fail, plaintext_email, outdi // Print pipeline summary on completion // def completionSummary(monochrome_logs=true) { - Map colors = logColours(monochrome_logs) + def colors = logColours(monochrome_logs) as Map if (workflow.success) { if (workflow.stats.ignoredCount == 0) { - log.info "-${colors.purple}[$workflow.manifest.name]${colors.green} Pipeline completed successfully${colors.reset}-" - } else { - log.info "-${colors.purple}[$workflow.manifest.name]${colors.yellow} Pipeline completed successfully, but with errored process(es) ${colors.reset}-" + log.info("-${colors.purple}[${workflow.manifest.name}]${colors.green} Pipeline completed
successfully${colors.reset}-") + } + else { + log.info("-${colors.purple}[${workflow.manifest.name}]${colors.yellow} Pipeline completed successfully, but with errored process(es) ${colors.reset}-") } - } else { - log.info "-${colors.purple}[$workflow.manifest.name]${colors.red} Pipeline completed with errors${colors.reset}-" + } + else { + log.info("-${colors.purple}[${workflow.manifest.name}]${colors.red} Pipeline completed with errors${colors.reset}-") } } @@ -395,21 +402,30 @@ def completionSummary(monochrome_logs=true) { // def imNotification(summary_params, hook_url) { def summary = [:] - for (group in summary_params.keySet()) { - summary << summary_params[group] - } + summary_params + .keySet() + .sort() + .each { group -> + summary << summary_params[group] + } def misc_fields = [:] - misc_fields['start'] = workflow.start - misc_fields['complete'] = workflow.complete - misc_fields['scriptfile'] = workflow.scriptFile - misc_fields['scriptid'] = workflow.scriptId - if (workflow.repository) misc_fields['repository'] = workflow.repository - if (workflow.commitId) misc_fields['commitid'] = workflow.commitId - if (workflow.revision) misc_fields['revision'] = workflow.revision - misc_fields['nxf_version'] = workflow.nextflow.version - misc_fields['nxf_build'] = workflow.nextflow.build - misc_fields['nxf_timestamp'] = workflow.nextflow.timestamp + misc_fields['start'] = workflow.start + misc_fields['complete'] = workflow.complete + misc_fields['scriptfile'] = workflow.scriptFile + misc_fields['scriptid'] = workflow.scriptId + if (workflow.repository) { + misc_fields['repository'] = workflow.repository + } + if (workflow.commitId) { + misc_fields['commitid'] = workflow.commitId + } + if (workflow.revision) { + misc_fields['revision'] = workflow.revision + } + misc_fields['nxf_version'] = workflow.nextflow.version + misc_fields['nxf_build'] = workflow.nextflow.build + misc_fields['nxf_timestamp'] = workflow.nextflow.timestamp def msg_fields = [:] msg_fields['version'] = getWorkflowVersion() @@ -434,13 +450,13 @@ def imNotification(summary_params, hook_url) { def json_message = json_template.toString() // POST - def post = new URL(hook_url).openConnection(); + def post = new URL(hook_url).openConnection() post.setRequestMethod("POST") post.setDoOutput(true) post.setRequestProperty("Content-Type", "application/json") - post.getOutputStream().write(json_message.getBytes("UTF-8")); - def postRC = post.getResponseCode(); - if (! postRC.equals(200)) { - log.warn(post.getErrorStream().getText()); + post.getOutputStream().write(json_message.getBytes("UTF-8")) + def postRC = post.getResponseCode() + if (!postRC.equals(200)) { + log.warn(post.getErrorStream().getText()) } } diff --git a/nf_core/pipeline-template/subworkflows/nf-core/utils_nfschema_plugin/main.nf b/nf_core/pipeline-template/subworkflows/nf-core/utils_nfschema_plugin/main.nf new file mode 100644 index 0000000000..4994303ea0 --- /dev/null +++ b/nf_core/pipeline-template/subworkflows/nf-core/utils_nfschema_plugin/main.nf @@ -0,0 +1,46 @@ +// +// Subworkflow that uses the nf-schema plugin to validate parameters and render the parameter summary +// + +include { paramsSummaryLog } from 'plugin/nf-schema' +include { validateParameters } from 'plugin/nf-schema' + +workflow UTILS_NFSCHEMA_PLUGIN { + + take: + input_workflow // workflow: the workflow object used by nf-schema to get metadata from the workflow + validate_params // boolean: validate the parameters + parameters_schema // string: path to the parameters JSON schema. 
+ // this has to be the same as the schema given to `validation.parametersSchema` + // when this input is empty it will automatically use the configured schema or + // "${projectDir}/nextflow_schema.json" as default. This input should not be empty + // for meta pipelines + + main: + + // + // Print parameter summary to stdout. This will display the parameters + // that differ from the default given in the JSON schema + // + if(parameters_schema) { + log.info paramsSummaryLog(input_workflow, parameters_schema:parameters_schema) + } else { + log.info paramsSummaryLog(input_workflow) + } + + // + // Validate the parameters using nextflow_schema.json or the schema + // given via the validation.parametersSchema configuration option + // + if(validate_params) { + if(parameters_schema) { + validateParameters(parameters_schema:parameters_schema) + } else { + validateParameters() + } + } + + emit: + dummy_emit = true +} + diff --git a/nf_core/pipeline-template/subworkflows/nf-core/utils_nfschema_plugin/meta.yml b/nf_core/pipeline-template/subworkflows/nf-core/utils_nfschema_plugin/meta.yml new file mode 100644 index 0000000000..f7d9f02885 --- /dev/null +++ b/nf_core/pipeline-template/subworkflows/nf-core/utils_nfschema_plugin/meta.yml @@ -0,0 +1,35 @@ +# yaml-language-server: $schema=https://raw.githubusercontent.com/nf-core/modules/master/subworkflows/yaml-schema.json +name: "utils_nfschema_plugin" +description: Run nf-schema to validate parameters and create a summary of changed parameters +keywords: + - validation + - JSON schema + - plugin + - parameters + - summary +components: [] +input: + - input_workflow: + type: object + description: | + The workflow object of the used pipeline. + This object contains meta data used to create the params summary log + - validate_params: + type: boolean + description: Validate the parameters and error if invalid. + - parameters_schema: + type: string + description: | + Path to the parameters JSON schema. + This has to be the same as the schema given to the `validation.parametersSchema` config + option. When this input is empty it will automatically use the configured schema or + "${projectDir}/nextflow_schema.json" as default. The schema should not be given in this way + for meta pipelines. 
+output: + - dummy_emit: + type: boolean + description: Dummy emit to make nf-core subworkflows lint happy +authors: + - "@nvnieuwk" +maintainers: + - "@nvnieuwk" diff --git a/nf_core/pipeline-template/subworkflows/nf-core/utils_nfschema_plugin/tests/main.nf.test b/nf_core/pipeline-template/subworkflows/nf-core/utils_nfschema_plugin/tests/main.nf.test new file mode 100644 index 0000000000..842dc432af --- /dev/null +++ b/nf_core/pipeline-template/subworkflows/nf-core/utils_nfschema_plugin/tests/main.nf.test @@ -0,0 +1,117 @@ +nextflow_workflow { + + name "Test Subworkflow UTILS_NFSCHEMA_PLUGIN" + script "../main.nf" + workflow "UTILS_NFSCHEMA_PLUGIN" + + tag "subworkflows" + tag "subworkflows_nfcore" + tag "subworkflows/utils_nfschema_plugin" + tag "plugin/nf-schema" + + config "./nextflow.config" + + test("Should run nothing") { + + when { + + params { + test_data = '' + } + + workflow { + """ + validate_params = false + input[0] = workflow + input[1] = validate_params + input[2] = "" + """ + } + } + + then { + assertAll( + { assert workflow.success } + ) + } + } + + test("Should validate params") { + + when { + + params { + test_data = '' + outdir = 1 + } + + workflow { + """ + validate_params = true + input[0] = workflow + input[1] = validate_params + input[2] = "" + """ + } + } + + then { + assertAll( + { assert workflow.failed }, + { assert workflow.stdout.any { it.contains('ERROR ~ Validation of pipeline parameters failed!') } } + ) + } + } + + test("Should run nothing - custom schema") { + + when { + + params { + test_data = '' + } + + workflow { + """ + validate_params = false + input[0] = workflow + input[1] = validate_params + input[2] = "${projectDir}/subworkflows/nf-core/utils_nfschema_plugin/tests/nextflow_schema.json" + """ + } + } + + then { + assertAll( + { assert workflow.success } + ) + } + } + + test("Should validate params - custom schema") { + + when { + + params { + test_data = '' + outdir = 1 + } + + workflow { + """ + validate_params = true + input[0] = workflow + input[1] = validate_params + input[2] = "${projectDir}/subworkflows/nf-core/utils_nfschema_plugin/tests/nextflow_schema.json" + """ + } + } + + then { + assertAll( + { assert workflow.failed }, + { assert workflow.stdout.any { it.contains('ERROR ~ Validation of pipeline parameters failed!') } } + ) + } + } +} diff --git a/nf_core/pipeline-template/subworkflows/nf-core/utils_nfschema_plugin/tests/nextflow.config b/nf_core/pipeline-template/subworkflows/nf-core/utils_nfschema_plugin/tests/nextflow.config new file mode 100644 index 0000000000..0907ac58f0 --- /dev/null +++ b/nf_core/pipeline-template/subworkflows/nf-core/utils_nfschema_plugin/tests/nextflow.config @@ -0,0 +1,8 @@ +plugins { + id "nf-schema@2.1.0" +} + +validation { + parametersSchema = "${projectDir}/subworkflows/nf-core/utils_nfschema_plugin/tests/nextflow_schema.json" + monochromeLogs = true +} \ No newline at end of file diff --git a/nf_core/pipeline-template/subworkflows/nf-core/utils_nfvalidation_plugin/tests/nextflow_schema.json b/nf_core/pipeline-template/subworkflows/nf-core/utils_nfschema_plugin/tests/nextflow_schema.json similarity index 95% rename from nf_core/pipeline-template/subworkflows/nf-core/utils_nfvalidation_plugin/tests/nextflow_schema.json rename to nf_core/pipeline-template/subworkflows/nf-core/utils_nfschema_plugin/tests/nextflow_schema.json index 7626c1c93e..331e0d2f44 100644 --- a/nf_core/pipeline-template/subworkflows/nf-core/utils_nfvalidation_plugin/tests/nextflow_schema.json +++ 
b/nf_core/pipeline-template/subworkflows/nf-core/utils_nfschema_plugin/tests/nextflow_schema.json @@ -1,10 +1,10 @@ { - "$schema": "http://json-schema.org/draft-07/schema", + "$schema": "https://json-schema.org/draft/2020-12/schema", "$id": "https://raw.githubusercontent.com/./master/nextflow_schema.json", "title": ". pipeline parameters", "description": "", "type": "object", - "definitions": { + "$defs": { "input_output_options": { "title": "Input/output options", "type": "object", @@ -87,10 +87,10 @@ }, "allOf": [ { - "$ref": "#/definitions/input_output_options" + "$ref": "#/$defs/input_output_options" }, { - "$ref": "#/definitions/generic_options" + "$ref": "#/$defs/generic_options" } ] } diff --git a/nf_core/pipeline-template/subworkflows/nf-core/utils_nfvalidation_plugin/main.nf b/nf_core/pipeline-template/subworkflows/nf-core/utils_nfvalidation_plugin/main.nf deleted file mode 100644 index 2585b65d1b..0000000000 --- a/nf_core/pipeline-template/subworkflows/nf-core/utils_nfvalidation_plugin/main.nf +++ /dev/null @@ -1,62 +0,0 @@ -// -// Subworkflow that uses the nf-validation plugin to render help text and parameter summary -// - -/* -======================================================================================== - IMPORT NF-VALIDATION PLUGIN -======================================================================================== -*/ - -include { paramsHelp } from 'plugin/nf-validation' -include { paramsSummaryLog } from 'plugin/nf-validation' -include { validateParameters } from 'plugin/nf-validation' - -/* -======================================================================================== - SUBWORKFLOW DEFINITION -======================================================================================== -*/ - -workflow UTILS_NFVALIDATION_PLUGIN { - - take: - print_help // boolean: print help - workflow_command // string: default commmand used to run pipeline - pre_help_text // string: string to be printed before help text and summary log - post_help_text // string: string to be printed after help text and summary log - validate_params // boolean: validate parameters - schema_filename // path: JSON schema file, null to use default value - - main: - - log.debug "Using schema file: ${schema_filename}" - - // Default values for strings - pre_help_text = pre_help_text ?: '' - post_help_text = post_help_text ?: '' - workflow_command = workflow_command ?: '' - - // - // Print help message if needed - // - if (print_help) { - log.info pre_help_text + paramsHelp(workflow_command, parameters_schema: schema_filename) + post_help_text - System.exit(0) - } - - // - // Print parameter summary to stdout - // - log.info pre_help_text + paramsSummaryLog(workflow, parameters_schema: schema_filename) + post_help_text - - // - // Validate parameters relative to the parameter JSON schema - // - if (validate_params){ - validateParameters(parameters_schema: schema_filename) - } - - emit: - dummy_emit = true -} diff --git a/nf_core/pipeline-template/subworkflows/nf-core/utils_nfvalidation_plugin/meta.yml b/nf_core/pipeline-template/subworkflows/nf-core/utils_nfvalidation_plugin/meta.yml deleted file mode 100644 index 3d4a6b04f5..0000000000 --- a/nf_core/pipeline-template/subworkflows/nf-core/utils_nfvalidation_plugin/meta.yml +++ /dev/null @@ -1,44 +0,0 @@ -# yaml-language-server: $schema=https://raw.githubusercontent.com/nf-core/modules/master/subworkflows/yaml-schema.json -name: "UTILS_NFVALIDATION_PLUGIN" -description: Use nf-validation to initiate and validate a pipeline -keywords: - - 
utility - - pipeline - - initialise - - validation -components: [] -input: - - print_help: - type: boolean - description: | - Print help message and exit - - workflow_command: - type: string - description: | - The command to run the workflow e.g. "nextflow run main.nf" - - pre_help_text: - type: string - description: | - Text to print before the help message - - post_help_text: - type: string - description: | - Text to print after the help message - - validate_params: - type: boolean - description: | - Validate the parameters and error if invalid. - - schema_filename: - type: string - description: | - The filename of the schema to validate against. -output: - - dummy_emit: - type: boolean - description: | - Dummy emit to make nf-core subworkflows lint happy -authors: - - "@adamrtalbot" -maintainers: - - "@adamrtalbot" - - "@maxulysse" diff --git a/nf_core/pipeline-template/subworkflows/nf-core/utils_nfvalidation_plugin/tests/main.nf.test b/nf_core/pipeline-template/subworkflows/nf-core/utils_nfvalidation_plugin/tests/main.nf.test deleted file mode 100644 index 5784a33f2f..0000000000 --- a/nf_core/pipeline-template/subworkflows/nf-core/utils_nfvalidation_plugin/tests/main.nf.test +++ /dev/null @@ -1,200 +0,0 @@ -nextflow_workflow { - - name "Test Workflow UTILS_NFVALIDATION_PLUGIN" - script "../main.nf" - workflow "UTILS_NFVALIDATION_PLUGIN" - tag "subworkflows" - tag "subworkflows_nfcore" - tag "plugin/nf-validation" - tag "'plugin/nf-validation'" - tag "utils_nfvalidation_plugin" - tag "subworkflows/utils_nfvalidation_plugin" - - test("Should run nothing") { - - when { - - params { - monochrome_logs = true - test_data = '' - } - - workflow { - """ - help = false - workflow_command = null - pre_help_text = null - post_help_text = null - validate_params = false - schema_filename = "$moduleTestDir/nextflow_schema.json" - - input[0] = help - input[1] = workflow_command - input[2] = pre_help_text - input[3] = post_help_text - input[4] = validate_params - input[5] = schema_filename - """ - } - } - - then { - assertAll( - { assert workflow.success } - ) - } - } - - test("Should run help") { - - - when { - - params { - monochrome_logs = true - test_data = '' - } - workflow { - """ - help = true - workflow_command = null - pre_help_text = null - post_help_text = null - validate_params = false - schema_filename = "$moduleTestDir/nextflow_schema.json" - - input[0] = help - input[1] = workflow_command - input[2] = pre_help_text - input[3] = post_help_text - input[4] = validate_params - input[5] = schema_filename - """ - } - } - - then { - assertAll( - { assert workflow.success }, - { assert workflow.exitStatus == 0 }, - { assert workflow.stdout.any { it.contains('Input/output options') } }, - { assert workflow.stdout.any { it.contains('--outdir') } } - ) - } - } - - test("Should run help with command") { - - when { - - params { - monochrome_logs = true - test_data = '' - } - workflow { - """ - help = true - workflow_command = "nextflow run noorg/doesntexist" - pre_help_text = null - post_help_text = null - validate_params = false - schema_filename = "$moduleTestDir/nextflow_schema.json" - - input[0] = help - input[1] = workflow_command - input[2] = pre_help_text - input[3] = post_help_text - input[4] = validate_params - input[5] = schema_filename - """ - } - } - - then { - assertAll( - { assert workflow.success }, - { assert workflow.exitStatus == 0 }, - { assert workflow.stdout.any { it.contains('nextflow run noorg/doesntexist') } }, - { assert workflow.stdout.any { it.contains('Input/output 
options') } }, - { assert workflow.stdout.any { it.contains('--outdir') } } - ) - } - } - - test("Should run help with extra text") { - - - when { - - params { - monochrome_logs = true - test_data = '' - } - workflow { - """ - help = true - workflow_command = "nextflow run noorg/doesntexist" - pre_help_text = "pre-help-text" - post_help_text = "post-help-text" - validate_params = false - schema_filename = "$moduleTestDir/nextflow_schema.json" - - input[0] = help - input[1] = workflow_command - input[2] = pre_help_text - input[3] = post_help_text - input[4] = validate_params - input[5] = schema_filename - """ - } - } - - then { - assertAll( - { assert workflow.success }, - { assert workflow.exitStatus == 0 }, - { assert workflow.stdout.any { it.contains('pre-help-text') } }, - { assert workflow.stdout.any { it.contains('nextflow run noorg/doesntexist') } }, - { assert workflow.stdout.any { it.contains('Input/output options') } }, - { assert workflow.stdout.any { it.contains('--outdir') } }, - { assert workflow.stdout.any { it.contains('post-help-text') } } - ) - } - } - - test("Should validate params") { - - when { - - params { - monochrome_logs = true - test_data = '' - outdir = 1 - } - workflow { - """ - help = false - workflow_command = null - pre_help_text = null - post_help_text = null - validate_params = true - schema_filename = "$moduleTestDir/nextflow_schema.json" - - input[0] = help - input[1] = workflow_command - input[2] = pre_help_text - input[3] = post_help_text - input[4] = validate_params - input[5] = schema_filename - """ - } - } - - then { - assertAll( - { assert workflow.failed }, - { assert workflow.stdout.any { it.contains('ERROR ~ ERROR: Validation of pipeline parameters failed!') } } - ) - } - } -} diff --git a/nf_core/pipeline-template/subworkflows/nf-core/utils_nfvalidation_plugin/tests/tags.yml b/nf_core/pipeline-template/subworkflows/nf-core/utils_nfvalidation_plugin/tests/tags.yml deleted file mode 100644 index 60b1cfff49..0000000000 --- a/nf_core/pipeline-template/subworkflows/nf-core/utils_nfvalidation_plugin/tests/tags.yml +++ /dev/null @@ -1,2 +0,0 @@ -subworkflows/utils_nfvalidation_plugin: - - subworkflows/nf-core/utils_nfvalidation_plugin/** diff --git a/nf_core/pipeline-template/workflows/pipeline.nf b/nf_core/pipeline-template/workflows/pipeline.nf index 8c797ede38..4dd8674c1b 100644 --- a/nf_core/pipeline-template/workflows/pipeline.nf +++ b/nf_core/pipeline-template/workflows/pipeline.nf @@ -4,12 +4,14 @@ ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ */ +{%- if modules %} {% if fastqc %}include { FASTQC } from '../modules/nf-core/fastqc/main'{% endif %} {% if multiqc %}include { MULTIQC } from '../modules/nf-core/multiqc/main'{% endif %} -include { paramsSummaryMap } from 'plugin/nf-validation' +{% if nf_schema %}include { paramsSummaryMap } from 'plugin/nf-schema'{% endif %} {% if multiqc %}include { paramsSummaryMultiqc } from '../subworkflows/nf-core/utils_nfcore_pipeline'{% endif %} include { softwareVersionsToYAML } from '../subworkflows/nf-core/utils_nfcore_pipeline' {% if citations or multiqc %}include { methodsDescriptionText } from '../subworkflows/local/utils_nfcore_{{ short_name }}_pipeline'{% endif %} +{%- endif %} /* ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ @@ -22,6 +24,7 @@ workflow {{ short_name|upper }} { take: ch_samplesheet // channel: samplesheet read in from --input + {%- if modules %} main: ch_versions = Channel.empty() @@ -44,7 +47,7 @@ 
workflow {{ short_name|upper }} { softwareVersionsToYAML(ch_versions) .collectFile( storeDir: "${params.outdir}/pipeline_info", - name: {% if is_nfcore %}'nf_core_' {% else %} '' {% endif %} + 'pipeline_software_' + {% if multiqc %} 'mqc_' {% else %} '' {% endif %} + 'versions.yml', + name: {% if is_nfcore %}'nf_core_' + {% endif %} '{{ short_name }}_software_' {% if multiqc %} + 'mqc_' {% endif %} + 'versions.yml', sort: true, newLine: true ).set { ch_collated_versions } @@ -62,9 +65,14 @@ workflow {{ short_name|upper }} { Channel.fromPath(params.multiqc_logo, checkIfExists: true) : Channel.empty() + {%- if nf_schema %} + summary_params = paramsSummaryMap( workflow, parameters_schema: "nextflow_schema.json") ch_workflow_summary = Channel.value(paramsSummaryMultiqc(summary_params)) + ch_multiqc_files = ch_multiqc_files.mix( + ch_workflow_summary.collectFile(name: 'workflow_summary_mqc.yaml')) + {%- endif %} {%- if citations %} ch_multiqc_custom_methods_description = params.multiqc_methods_description ? @@ -74,8 +82,6 @@ workflow {{ short_name|upper }} { methodsDescriptionText(ch_multiqc_custom_methods_description)) {%- endif %} - ch_multiqc_files = ch_multiqc_files.mix( - ch_workflow_summary.collectFile(name: 'workflow_summary_mqc.yaml')) ch_multiqc_files = ch_multiqc_files.mix(ch_collated_versions) {%- if citations %} ch_multiqc_files = ch_multiqc_files.mix( @@ -90,12 +96,15 @@ workflow {{ short_name|upper }} { ch_multiqc_files.collect(), ch_multiqc_config.toList(), ch_multiqc_custom_config.toList(), - ch_multiqc_logo.toList() + ch_multiqc_logo.toList(), + [], + [] ) {% endif %} emit: {%- if multiqc %}multiqc_report = MULTIQC.out.report.toList() // channel: /path/to/multiqc_report.html{% endif %} versions = ch_versions // channel: [ path(versions.yml) ] +{% endif %} } /* diff --git a/nf_core/pipelines/bump_version.py b/nf_core/pipelines/bump_version.py index 18aa869328..3190ed70d4 100644 --- a/nf_core/pipelines/bump_version.py +++ b/nf_core/pipelines/bump_version.py @@ -5,9 +5,10 @@ import logging import re from pathlib import Path -from typing import List, Tuple, Union +from typing import List, Optional, Tuple, Union import rich.console +from ruamel.yaml import YAML import nf_core.utils from nf_core.utils import Pipeline @@ -60,6 +61,7 @@ def bump_pipeline_version(pipeline_obj: Pipeline, new_version: str) -> None: f"/releases/tag/{new_version}", ) ], + yaml_key=["report_comment"], ) if multiqc_current_version != "dev" and multiqc_new_version == "dev": update_file_version( @@ -71,6 +73,7 @@ def bump_pipeline_version(pipeline_obj: Pipeline, new_version: str) -> None: "/tree/dev", ) ], + yaml_key=["report_comment"], ) if multiqc_current_version == "dev" and multiqc_new_version != "dev": update_file_version( @@ -82,6 +85,7 @@ def bump_pipeline_version(pipeline_obj: Pipeline, new_version: str) -> None: f"/releases/tag/{multiqc_new_version}", ) ], + yaml_key=["report_comment"], ) update_file_version( Path("assets", "multiqc_config.yml"), @@ -92,6 +96,7 @@ def bump_pipeline_version(pipeline_obj: Pipeline, new_version: str) -> None: f"/{multiqc_new_version}/", ), ], + yaml_key=["report_comment"], ) # nf-test snap files pipeline_name = pipeline_obj.nf_config.get("manifest.name", "").strip(" '\"") @@ -107,6 +112,20 @@ def bump_pipeline_version(pipeline_obj: Pipeline, new_version: str) -> None: ) ], ) + # .nf-core.yml - pipeline version + # update entry: version: 1.0.0dev, but not `nf_core_version`, or `bump_version` + update_file_version( + ".nf-core.yml", + pipeline_obj, + [ + ( + 
current_version, + new_version, + ) + ], + required=False, + yaml_key=["template", "version"], + ) def bump_nextflow_version(pipeline_obj: Pipeline, new_version: str) -> None: @@ -147,10 +166,11 @@ def bump_nextflow_version(pipeline_obj: Pipeline, new_version: str) -> None: # example: # NXF_VER: # - "20.04.0" - rf"- \"{re.escape(current_version)}\"", - f'- "{new_version}"', + current_version, + new_version, ) ], + yaml_key=["jobs", "test", "strategy", "matrix", "NXF_VER"], ) # README.md - Nextflow version badge @@ -161,70 +181,128 @@ def bump_nextflow_version(pipeline_obj: Pipeline, new_version: str) -> None: ( rf"nextflow%20DSL2-%E2%89%A5{re.escape(current_version)}-23aa62.svg", f"nextflow%20DSL2-%E2%89%A5{new_version}-23aa62.svg", - ), - ( - # example: 1. Install [`Nextflow`](https://www.nextflow.io/docs/latest/getstarted.html#installation) (`>=20.04.0`) - rf"1\.\s*Install\s*\[`Nextflow`\]\(https:\/\/www\.nextflow\.io\/docs\/latest\/getstarted\.html#installation\)\s*\(`>={re.escape(current_version)}`\)", - f"1. Install [`Nextflow`](https://www.nextflow.io/docs/latest/getstarted.html#installation) (`>={new_version}`)", - ), + ) ], ) -def update_file_version(filename: Union[str, Path], pipeline_obj: Pipeline, patterns: List[Tuple[str, str]]) -> None: - """Updates the version number in a requested file. +def update_file_version( + filename: Union[str, Path], + pipeline_obj: Pipeline, + patterns: List[Tuple[str, str]], + required: bool = True, + yaml_key: Optional[List[str]] = None, +) -> None: + """ + Updates a file with a new version number. Args: - filename (str): File to scan. - pipeline_obj (nf_core.pipelines.lint.PipelineLint): A PipelineLint object that holds information - about the pipeline contents and build files. - pattern (str): Regex pattern to apply. - - Raises: - ValueError, if the version number cannot be found. + filename (str): The name of the file to update. + pipeline_obj (nf_core.utils.Pipeline): A `Pipeline` object that holds information + about the pipeline contents. + patterns (List[Tuple[str, str]]): A list of tuples containing the regex patterns to + match and the replacement strings. + required (bool, optional): Whether the file is required to exist. Defaults to `True`. + yaml_key (Optional[List[str]], optional): The YAML key to update. Defaults to `None`. """ - # Load the file - fn = pipeline_obj._fp(filename) - content = "" - try: - with open(fn) as fh: - content = fh.read() - except FileNotFoundError: + fn: Path = pipeline_obj._fp(filename) + + if not fn.exists(): log.warning(f"File not found: '{fn}'") return - replacements = [] - for pattern in patterns: - found_match = False + if yaml_key: + update_yaml_file(fn, patterns, yaml_key, required) + else: + update_text_file(fn, patterns, required) - newcontent = [] - for line in content.splitlines(): - # Match the pattern - matches_pattern = re.findall(rf"^.*{pattern[0]}.*$", line) - if matches_pattern: - found_match = True - # Replace the match - newline = re.sub(pattern[0], pattern[1], line) - newcontent.append(newline) +def update_yaml_file(fn: Path, patterns: List[Tuple[str, str]], yaml_key: List[str], required: bool): + """ + Updates a YAML file with a new version number. - # Save for logging - replacements.append((line, newline)) + Args: + fn (Path): The name of the file to update. + patterns (List[Tuple[str, str]]): A list of tuples containing the regex patterns to + match and the replacement strings. + yaml_key (List[str]): The YAML key to update. + required (bool): Whether the file is required to exist. 
+ """ + yaml = YAML() + yaml.preserve_quotes = True + with open(fn) as file: + yaml_content = yaml.load(file) + + try: + target = yaml_content + for key in yaml_key[:-1]: + target = target[key] - # No match, keep line as it is + last_key = yaml_key[-1] + current_value = target[last_key] + + new_value = current_value + for pattern, replacement in patterns: + # check if current value is list + if isinstance(current_value, list): + new_value = [re.sub(pattern, replacement, item) for item in current_value] else: - newcontent.append(line) + new_value = re.sub(pattern, replacement, current_value) - if found_match: - content = "\n".join(newcontent) + "\n" - else: - log.error(f"Could not find version number in {filename}: `{pattern}`") + if new_value != current_value: + target[last_key] = new_value + with open(fn, "w") as file: + yaml.dump(yaml_content, file) + log.info(f"Updated version in YAML file '{fn}'") + log_change(str(current_value), str(new_value)) + except KeyError as e: + handle_error(f"Could not find key {e} in the YAML structure of {fn}", required) - log.info(f"Updated version in '{filename}'") - for replacement in replacements: - stderr.print(f" [red] - {replacement[0].strip()}", highlight=False) - stderr.print(f" [green] + {replacement[1].strip()}", highlight=False) - stderr.print("\n") - with open(fn, "w") as fh: - fh.write(content) +def update_text_file(fn: Path, patterns: List[Tuple[str, str]], required: bool): + """ + Updates a text file with a new version number. + + Args: + fn (Path): The name of the file to update. + patterns (List[Tuple[str, str]]): A list of tuples containing the regex patterns to + match and the replacement strings. + required (bool): Whether the file is required to exist. + """ + with open(fn) as file: + content = file.read() + + updated = False + for pattern, replacement in patterns: + new_content, count = re.subn(pattern, replacement, content) + if count > 0: + log_change(content, new_content) + content = new_content + updated = True + log.info(f"Updated version in '{fn}'") + log.debug(f"Replaced pattern '{pattern}' with '{replacement}' {count} times") + elif required: + handle_error(f"Could not find version number in {fn}: `{pattern}`", required) + + if updated: + with open(fn, "w") as file: + file.write(content) + + +def handle_error(message: str, required: bool): + if required: + raise ValueError(message) + else: + log.info(message) + + +def log_change(old_content: str, new_content: str): + old_lines = old_content.splitlines() + new_lines = new_content.splitlines() + + for old_line, new_line in zip(old_lines, new_lines): + if old_line != new_line: + stderr.print(f" [red] - {old_line.strip()}", highlight=False) + stderr.print(f" [green] + {new_line.strip()}", highlight=False) + + stderr.print("\n") diff --git a/nf_core/pipelines/create/__init__.py b/nf_core/pipelines/create/__init__.py index 8b0edf34cf..6a610ccccb 100644 --- a/nf_core/pipelines/create/__init__.py +++ b/nf_core/pipelines/create/__init__.py @@ -1,14 +1,11 @@ """A Textual app to create a pipeline.""" import logging -from pathlib import Path import click -import yaml from textual.app import App -from textual.widgets import Button +from textual.widgets import Button, Switch -import nf_core from nf_core.pipelines.create import utils from nf_core.pipelines.create.basicdetails import BasicDetails from nf_core.pipelines.create.custompipeline import CustomPipeline @@ -41,11 +38,12 @@ class PipelineCreateApp(App[utils.CreateConfig]): """A Textual app to manage stopwatches.""" CSS_PATH = 
"create.tcss" - TITLE = "nf-core create" + TITLE = "nf-core pipelines create" SUB_TITLE = "Create a new pipeline with the nf-core pipeline template" BINDINGS = [ ("d", "toggle_dark", "Toggle dark mode"), ("q", "quit", "Quit"), + ("a", "toggle_all", "Toggle all"), ] SCREENS = { "welcome": WelcomeScreen(), @@ -105,3 +103,14 @@ def on_button_pressed(self, event: Button.Pressed) -> None: def action_toggle_dark(self) -> None: """An action to toggle dark mode.""" self.dark: bool = not self.dark + + def action_toggle_all(self) -> None: + """An action to toggle all Switches.""" + switches = self.query(Switch) + if not switches: + return # No Switches widgets found + # Determine the new state based on the first switch + new_state = not switches.first().value if switches.first() else True + for switch in switches: + switch.value = new_state + self.refresh() diff --git a/nf_core/pipelines/create/create.py b/nf_core/pipelines/create/create.py index 05b04a5422..8ab547c1cc 100644 --- a/nf_core/pipelines/create/create.py +++ b/nf_core/pipelines/create/create.py @@ -70,6 +70,8 @@ def __init__( self.config = CreateConfig(**config_yml["template"].model_dump()) else: raise UserWarning("The template configuration was not provided in '.nf-core.yml'.") + # Update the output directory + self.config.outdir = outdir if outdir else Path().cwd() except (FileNotFoundError, UserWarning): log.debug("The '.nf-core.yml' configuration file was not found.") elif (name and description and author) or ( @@ -180,7 +182,7 @@ def update_config(self, organisation, version, force, outdir): self.config.force = force if force else False if self.config.outdir is None: self.config.outdir = outdir if outdir else "." - if self.config.is_nfcore is None: + if self.config.is_nfcore is None or self.config.is_nfcore == "null": self.config.is_nfcore = self.config.org == "nf-core" def obtain_jinja_params_dict( @@ -361,14 +363,12 @@ def render_template(self) -> None: config_fn, config_yml = nf_core.utils.load_tools_config(self.outdir) if config_fn is not None and config_yml is not None: with open(str(config_fn), "w") as fh: - self.config.outdir = str(self.config.outdir) config_yml.template = NFCoreTemplateConfig(**self.config.model_dump()) yaml.safe_dump(config_yml.model_dump(), fh) log.debug(f"Dumping pipeline template yml to pipeline config file '{config_fn.name}'") - run_prettier_on_file(self.outdir / config_fn) # Run prettier on files - run_prettier_on_file(self.outdir) + run_prettier_on_file([str(f) for f in self.outdir.glob("**/*")]) def fix_linting(self): """ @@ -399,8 +399,6 @@ def fix_linting(self): with open(self.outdir / config_fn, "w") as fh: yaml.dump(nf_core_yml.model_dump(), fh, default_flow_style=False, sort_keys=False) - run_prettier_on_file(Path(self.outdir, config_fn)) - def make_pipeline_logo(self): """Fetch a logo for the new pipeline from the nf-core website""" email_logo_path = Path(self.outdir) / "assets" diff --git a/nf_core/pipelines/create/custompipeline.py b/nf_core/pipelines/create/custompipeline.py index 5debcfee7f..e433db41ec 100644 --- a/nf_core/pipelines/create/custompipeline.py +++ b/nf_core/pipelines/create/custompipeline.py @@ -2,9 +2,9 @@ from textual import on from textual.app import ComposeResult -from textual.containers import Center, ScrollableContainer +from textual.containers import Center, Horizontal, ScrollableContainer from textual.screen import Screen -from textual.widgets import Button, Footer, Header, Markdown, Switch +from textual.widgets import Button, Footer, Header, Markdown, Static, Switch 
from nf_core.pipelines.create.utils import PipelineFeature @@ -22,7 +22,13 @@ def compose(self) -> ComposeResult: """ ) ) + yield Horizontal( + Switch(id="toggle_all", value=True), + Static("Toggle all features", classes="feature_title"), + classes="custom_grid", + ) yield ScrollableContainer(id="features") + yield Center( Button("Back", id="back", variant="default"), Button("Continue", id="continue", variant="success"), @@ -35,6 +41,7 @@ def on_mount(self) -> None: self.query_one("#features").mount( PipelineFeature(feature["help_text"], feature["short_description"], feature["description"], name) ) + self.query_one("#toggle_all", Switch).value = True @on(Button.Pressed, "#continue") def on_button_pressed(self, event: Button.Pressed) -> None: @@ -45,3 +52,10 @@ def on_button_pressed(self, event: Button.Pressed) -> None: if not this_switch.value: skip.append(this_switch.id) self.parent.TEMPLATE_CONFIG.__dict__.update({"skip_features": skip, "is_nfcore": False}) + + @on(Switch.Changed, "#toggle_all") + def on_toggle_all(self, event: Switch.Changed) -> None: + """Handle toggling all switches.""" + new_state = event.value + for feature in self.query("PipelineFeature"): + feature.query_one(Switch).value = new_state diff --git a/nf_core/pipelines/create/templatefeatures.yml b/nf_core/pipelines/create/template_features.yml similarity index 85% rename from nf_core/pipelines/create/templatefeatures.yml rename to nf_core/pipelines/create/template_features.yml index 82ec111e42..3eb6547265 100644 --- a/nf_core/pipelines/create/templatefeatures.yml +++ b/nf_core/pipelines/create/template_features.yml @@ -1,10 +1,18 @@ github: skippable_paths: - - ".github/" - - ".gitignore" - short_description: "Skip the creation of a local Git repository." - description: "" - help_text: "" + - ".github" + - ".gitattributes" + short_description: "Use a GitHub repository." + description: "Create a GitHub repository for the pipeline." + help_text: | + This will create a GitHub repository for the pipeline. + + The repository will include: + - Continuous Integration (CI) tests + - Issues and pull requests templates + + The initialisation of a git repository is required to use the nf-core/tools. + This means that even if you unselect this option, your pipeline will still contain a `.git` directory and `.gitignore` file. 
linting: files_exist: - ".github/ISSUE_TEMPLATE/bug_report.yml" @@ -12,7 +20,6 @@ github: - ".github/PULL_REQUEST_TEMPLATE.md" - ".github/CONTRIBUTING.md" - ".github/.dockstore.yml" - - ".gitignore" files_unchanged: - ".github/ISSUE_TEMPLATE/bug_report.yml" - ".github/ISSUE_TEMPLATE/config.yml" @@ -26,7 +33,7 @@ github: readme: - "nextflow_badge" nfcore_pipelines: False - custom_pipelines: False + custom_pipelines: True ci: skippable_paths: - ".github/workflows/" @@ -53,6 +60,7 @@ ci: igenomes: skippable_paths: - "conf/igenomes.config" + - "conf/igenomes_ignored.config" short_description: "Use reference genomes" description: "The pipeline will be configured to use a copy of the most common reference genome files from iGenomes" help_text: | @@ -69,6 +77,7 @@ igenomes: linting: files_exist: - "conf/igenomes.config" + - "conf/igenomes_ignored.config" nfcore_pipelines: True custom_pipelines: True github_badges: @@ -111,6 +120,7 @@ nf_core_configs: - "custom_config" - "params.custom_config_version" - "params.custom_config_base" + included_configs: False nfcore_pipelines: False custom_pipelines: True is_nfcore: @@ -141,6 +151,10 @@ is_nfcore: nextflow_config: - "manifest.name" - "manifest.homePage" + - "validation.help.beforeText" + - "validation.help.afterText" + - "validation.summary.beforeText" + - "validation.summary.afterText" multiqc_config: - "report_comment" nfcore_pipelines: False @@ -172,6 +186,7 @@ code_linters: citations: skippable_paths: - "assets/methods_description_template.yml" + - "CITATIONS.md" short_description: "Include citations" description: "Include pipeline tools citations in CITATIONS.md and a method description in the MultiQC report (if enabled)." help_text: | @@ -179,6 +194,9 @@ citations: Additionally, it will include a YAML file (`assets/methods_description_template.yml`) to add a Materials & Methods section describing the tools used in the pieline, and the logics to add this section to the output MultiQC report (if the report is generated). + linting: + files_exist: + - "CITATIONS.md" nfcore_pipelines: False custom_pipelines: True gitpod: @@ -238,6 +256,29 @@ fastqc: The pipeline will include the FastQC module. nfcore_pipelines: True custom_pipelines: True +modules: + skippable_paths: + - "conf/base.config" + - "conf/modules.config" + - "modules.json" + - "modules" + - "subworkflows" + short_description: "Use nf-core components" + description: "Include all required files to use nf-core modules and subworkflows" + help_text: | + It is *recommended* to use this feature if you want to use modules and subworkflows in your pipeline. + This will add all required files to use nf-core components or any compatible components from private repos by using `nf-core modules` and `nf-core subworkflows` commands. + linting: + nfcore_components: False + modules_json: False + base_config: False + modules_config: False + files_exist: + - "conf/base.config" + - "conf/modules.config" + - "modules.json" + nfcore_pipelines: False + custom_pipelines: True changelog: skippable_paths: - "CHANGELOG.md" @@ -252,6 +293,27 @@ changelog: - "CHANGELOG.md" nfcore_pipelines: False custom_pipelines: True +nf_schema: + skippable_paths: + - "subworkflows/nf-core/utils_nfschema_plugin" + - "nextflow_schema.json" + - "assets/schema_input.json" + - "assets/samplesheet.csv" + short_description: "Use nf-schema" + description: "Use the nf-schema Nextflow plugin for this pipeline." 
+  help_text: |
+    [nf-schema](https://nextflow-io.github.io/nf-schema/latest/) is used to validate input parameters based on a JSON schema.
+    It also provides helper functionality to create help messages, get a summary
+    of changed parameters and validate and convert a samplesheet to a channel.
+  linting:
+    files_exist:
+      - "nextflow_schema.json"
+    schema_params: False
+    schema_lint: False
+    schema_description: False
+    nextflow_config: False
+  nfcore_pipelines: True
+  custom_pipelines: True
license:
  skippable_paths:
    - "LICENSE"
diff --git a/nf_core/pipelines/create/utils.py b/nf_core/pipelines/create/utils.py
index 0b72c2bcf5..9b331c2a3b 100644
--- a/nf_core/pipelines/create/utils.py
+++ b/nf_core/pipelines/create/utils.py
@@ -37,7 +37,7 @@ def init_context(value: Dict[str, Any]) -> Iterator[None]:
 NFCORE_PIPELINE_GLOBAL: bool = True

 # YAML file describing template features
-features_yml_path = Path(nf_core.__file__).parent / "pipelines" / "create" / "templatefeatures.yml"
+features_yml_path = Path(nf_core.__file__).parent / "pipelines" / "create" / "template_features.yml"


 class CreateConfig(NFCoreTemplateConfig):
diff --git a/nf_core/pipelines/download.py b/nf_core/pipelines/download.py
index 97453b127e..b9028d4b3a 100644
--- a/nf_core/pipelines/download.py
+++ b/nf_core/pipelines/download.py
@@ -133,10 +133,8 @@ def __init__(
         self.force = force
         self.platform = platform
         self.fullname: Optional[str] = None
-        # if flag is not specified, do not assume deliberate choice and prompt config inclusion interactively.
-        # this implies that non-interactive "no" choice is only possible implicitly (e.g. with --platform or if prompt is suppressed by !stderr.is_interactive).
-        # only alternative would have been to make it a parameter with argument, e.g. -d="yes" or -d="no".
-        self.include_configs = True if download_configuration else False if bool(platform) else None
+        # downloading configs is not supported for Seqera Platform downloads.
+        self.include_configs = True if download_configuration == "yes" and not bool(platform) else False
         # Additional tags to add to the downloaded pipeline. This enables to mark particular commits or revisions with
         # additional tags, e.g. "stable", "testing", "validated", "production" etc. Since this requires a git-repo, it is only
         # available for the bare / Seqera Platform download.
@@ -748,7 +746,7 @@ def find_container_images(self, workflow_directory: str) -> None:
         self.nf_config is needed, because we need to restart search over raw input
         if no proper container matches are found.
         """
-                    config_findings.append((k, v.strip('"').strip("'"), self.nf_config, "Nextflow configs"))
+                    config_findings.append((k, v.strip("'\""), self.nf_config, "Nextflow configs"))

         # rectify the container paths found in the config
         # Raw config_findings may yield multiple containers, so create a shallow copy of the list, since the lengths of input and output may differ.
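Two small behaviour changes in the `download.py` hunk above are easy to miss, so here is a minimal standalone sketch of both (plain Python, not part of the patch): `str.strip("'\"")` removes any mix of single and double quotes from both ends in one call, and institutional configs are now only bundled when the user explicitly answered "yes" and the download is not for Seqera Platform.

```python
# Minimal sketch, assuming nothing beyond the Python standard library.
# 1) str.strip("'\"") strips any mix of quote characters from both ends,
#    which is why the chained .strip('"').strip("'") could be collapsed.
for raw in ['"docker://img"', "'docker://img'", "docker://img"]:
    assert raw.strip("'\"") == "docker://img"

# 2) The new include_configs logic: configs are only bundled when the answer
#    was explicitly "yes" AND this is not a Seqera Platform download.
def include_configs(download_configuration, platform) -> bool:
    return download_configuration == "yes" and not bool(platform)

assert include_configs("yes", False)
assert not include_configs("yes", True)
assert not include_configs(None, False)
```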
diff --git a/nf_core/pipelines/launch.py b/nf_core/pipelines/launch.py
index e03982a25a..a80639ea94 100644
--- a/nf_core/pipelines/launch.py
+++ b/nf_core/pipelines/launch.py
@@ -263,15 +263,21 @@ def set_schema_inputs(self):

     def merge_nxf_flag_schema(self):
         """Take the Nextflow flag schema and merge it with the pipeline schema"""
+        if "allOf" not in self.schema_obj.schema:
+            self.schema_obj.schema["allOf"] = []
         # Add the coreNextflow subschema to the schema definitions
-        if "definitions" not in self.schema_obj.schema:
-            self.schema_obj.schema["definitions"] = {}
-        self.schema_obj.schema["definitions"].update(self.nxf_flag_schema)
+        if "$defs" in self.schema_obj.schema or "definitions" not in self.schema_obj.schema:
+            if "$defs" not in self.schema_obj.schema:
+                self.schema_obj.schema["$defs"] = {}
+            self.schema_obj.schema["$defs"].update(self.nxf_flag_schema)
+            self.schema_obj.schema["allOf"].insert(0, {"$ref": "#/$defs/coreNextflow"})
+
+        if "definitions" in self.schema_obj.schema:
+            self.schema_obj.schema["definitions"].update(self.nxf_flag_schema)
+            self.schema_obj.schema["allOf"].insert(0, {"$ref": "#/definitions/coreNextflow"})
         # Add the new definition to the allOf key so that it's included in validation
         # Put it at the start of the list so that it comes first
-        if "allOf" not in self.schema_obj.schema:
-            self.schema_obj.schema["allOf"] = []
-        self.schema_obj.schema["allOf"].insert(0, {"$ref": "#/definitions/coreNextflow"})

     def prompt_web_gui(self):
         """Ask whether to use the web-based or cli wizard to collect params"""
@@ -379,7 +385,8 @@ def sanitise_web_response(self):
         for param_id, param_obj in self.schema_obj.schema.get("properties", {}).items():
             questionary_objects[param_id] = self.single_param_to_questionary(param_id, param_obj, print_help=False)

-        for _, definition in self.schema_obj.schema.get("definitions", {}).items():
+        definitions_schemas = self.schema_obj.schema.get("$defs", self.schema_obj.schema.get("definitions", {})).items()
+        for _, definition in definitions_schemas:
             for param_id, param_obj in definition.get("properties", {}).items():
                 questionary_objects[param_id] = self.single_param_to_questionary(param_id, param_obj, print_help=False)

@@ -399,9 +406,10 @@ def prompt_schema(self):
         """Go through the pipeline schema and prompt user to change defaults"""
         answers = {}
         # Start with the subschema in the definitions - use order of allOf
+        definitions_schemas = self.schema_obj.schema.get("$defs", self.schema_obj.schema.get("definitions", {}))
         for allOf in self.schema_obj.schema.get("allOf", []):
-            d_key = allOf["$ref"][14:]
-            answers.update(self.prompt_group(d_key, self.schema_obj.schema["definitions"][d_key]))
+            d_key = allOf["$ref"].split("/")[-1]
+            answers.update(self.prompt_group(d_key, definitions_schemas[d_key]))

         # Top level schema params
         for param_id, param_obj in self.schema_obj.schema.get("properties", {}).items():
diff --git a/nf_core/pipelines/lint/__init__.py b/nf_core/pipelines/lint/__init__.py
index ed833d3219..8cc7c37cb2 100644
--- a/nf_core/pipelines/lint/__init__.py
+++ b/nf_core/pipelines/lint/__init__.py
@@ -9,7 +9,7 @@
 import logging
 import os
 from pathlib import Path
-from typing import List, Tuple, Union
+from typing import List, Optional, Tuple, Union

 import git
 import rich
@@ -37,6 +37,7 @@
 from .configs import base_config, modules_config
 from .files_exist import files_exist
 from .files_unchanged import files_unchanged
+from .included_configs import included_configs
 from .merge_markers import merge_markers
 from .modules_json import modules_json
 from .modules_structure import modules_structure
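A hedged sketch of the notation handling introduced in `merge_nxf_flag_schema` above (standalone Python; the `-resume` flag is an illustrative stand-in, not the real `nxf_flag_schema` contents): the merge targets `$defs` for Draft 2020-12 schemas, falls back to `definitions` for Draft 7, and registers the subschema in `allOf` so it takes part in validation.

```python
# Sketch only: "coreNextflow" mirrors the diff; "-resume" is an assumed
# example flag rather than the actual nxf_flag_schema contents.
nxf_flag_schema = {"coreNextflow": {"properties": {"-resume": {"type": "boolean"}}}}

def merge_nxf_flags(schema: dict) -> dict:
    schema.setdefault("allOf", [])
    # Prefer "$defs" (Draft 2020-12); fall back to "definitions" (Draft 7).
    notation = "$defs" if "$defs" in schema or "definitions" not in schema else "definitions"
    schema.setdefault(notation, {}).update(nxf_flag_schema)
    # Put the core flags first so they are prompted before pipeline params.
    schema["allOf"].insert(0, {"$ref": f"#/{notation}/coreNextflow"})
    return schema

merged = merge_nxf_flags({"$defs": {"input_output_options": {"properties": {}}}})
assert merged["allOf"][0] == {"$ref": "#/$defs/coreNextflow"}
assert "coreNextflow" in merged["$defs"]
```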
@@ -45,6 +46,7 @@ from .nfcore_yml import nfcore_yml from .pipeline_name_conventions import pipeline_name_conventions from .pipeline_todos import pipeline_todos +from .plugin_includes import plugin_includes from .readme import readme from .schema_description import schema_description from .schema_lint import schema_lint @@ -92,6 +94,7 @@ class PipelineLint(nf_core.utils.Pipeline): nfcore_yml = nfcore_yml pipeline_name_conventions = pipeline_name_conventions pipeline_todos = pipeline_todos + plugin_includes = plugin_includes readme = readme schema_description = schema_description schema_lint = schema_lint @@ -99,6 +102,7 @@ class PipelineLint(nf_core.utils.Pipeline): system_exit = system_exit template_strings = template_strings version_consistency = version_consistency + included_configs = included_configs def __init__( self, wf_path, release_mode=False, fix=(), key=None, fail_ignored=False, fail_warned=False, hide_progress=False @@ -135,6 +139,7 @@ def _get_all_lint_tests(release_mode): "actions_awsfulltest", "readme", "pipeline_todos", + "plugin_includes", "pipeline_name_conventions", "template_strings", "schema_lint", @@ -149,7 +154,7 @@ def _get_all_lint_tests(release_mode): "base_config", "modules_config", "nfcore_yml", - ] + (["version_consistency"] if release_mode else []) + ] + (["version_consistency", "included_configs"] if release_mode else []) def _load(self) -> bool: """Load information about the pipeline into the PipelineLint object""" @@ -174,7 +179,8 @@ def _load_lint_config(self) -> bool: # Check if we have any keys that don't match lint test names if self.lint_config is not None: for k in self.lint_config: - if k not in self.lint_tests: + if k != "nfcore_components" and k not in self.lint_tests: + # nfcore_components is an exception to allow custom pipelines without nf-core components log.warning(f"Found unrecognised test name '{k}' in pipeline lint config") is_correct = False @@ -543,7 +549,7 @@ def run_linting( md_fn=None, json_fn=None, hide_progress: bool = False, -) -> Tuple[PipelineLint, ComponentLint, Union[ComponentLint, None]]: +) -> Tuple[PipelineLint, Optional[ComponentLint], Optional[ComponentLint]]: """Runs all nf-core linting checks on a given Nextflow pipeline project in either `release` mode or `normal` mode (default). Returns an object of type :class:`PipelineLint` after finished. 
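The registration pattern above is simple enough to sketch before the next hunk: lint test functions are attached to `PipelineLint` as plain attributes, and release-only tests such as the new `included_configs` are appended to the common list. This is an assumed simplification for illustration, not the real class:

```python
# Assumed simplification of _get_all_lint_tests: common tests plus the
# release-only ones, mirroring how "included_configs" joins
# "version_consistency" in the diff above.
def get_all_lint_tests(release_mode: bool) -> list:
    common = ["files_exist", "readme", "pipeline_todos", "plugin_includes", "nfcore_yml"]
    return common + (["version_consistency", "included_configs"] if release_mode else [])

assert "included_configs" in get_all_lint_tests(True)
assert "included_configs" not in get_all_lint_tests(False)
```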
@@ -588,41 +594,45 @@ def run_linting( lint_obj._load_lint_config() lint_obj.load_pipeline_config() - # Create the modules lint object - module_lint_obj = nf_core.modules.lint.ModuleLint(pipeline_dir, hide_progress=hide_progress) - # Create the subworkflows lint object - try: - subworkflow_lint_obj = nf_core.subworkflows.lint.SubworkflowLint(pipeline_dir, hide_progress=hide_progress) - except LookupError: + if "nfcore_components" in lint_obj.lint_config and not lint_obj.lint_config["nfcore_components"]: + module_lint_obj = None subworkflow_lint_obj = None - - # Verify that the pipeline is correctly configured and has a modules.json file - module_lint_obj.has_valid_directory() - module_lint_obj.has_modules_file() - # Run only the tests we want - if key: - # Select only the module lint tests - module_lint_tests = list( - set(key).intersection(set(nf_core.modules.lint.ModuleLint.get_all_module_lint_tests(is_pipeline=True))) - ) - # Select only the subworkflow lint tests - subworkflow_lint_tests = list( - set(key).intersection( - set(nf_core.subworkflows.lint.SubworkflowLint.get_all_subworkflow_lint_tests(is_pipeline=True)) - ) - ) else: - # If no key is supplied, run the default modules tests - module_lint_tests = list(("module_changes", "module_version")) - subworkflow_lint_tests = list(("subworkflow_changes", "subworkflow_version")) - module_lint_obj.filter_tests_by_key(module_lint_tests) - if subworkflow_lint_obj is not None: - subworkflow_lint_obj.filter_tests_by_key(subworkflow_lint_tests) - - # Set up files for component linting test - module_lint_obj.set_up_pipeline_files() - if subworkflow_lint_obj is not None: - subworkflow_lint_obj.set_up_pipeline_files() + # Create the modules lint object + module_lint_obj = nf_core.modules.lint.ModuleLint(pipeline_dir, hide_progress=hide_progress) + # Create the subworkflows lint object + try: + subworkflow_lint_obj = nf_core.subworkflows.lint.SubworkflowLint(pipeline_dir, hide_progress=hide_progress) + except LookupError: + subworkflow_lint_obj = None + + # Verify that the pipeline is correctly configured and has a modules.json file + module_lint_obj.has_valid_directory() + module_lint_obj.has_modules_file() + # Run only the tests we want + if key: + # Select only the module lint tests + module_lint_tests = list( + set(key).intersection(set(nf_core.modules.lint.ModuleLint.get_all_module_lint_tests(is_pipeline=True))) + ) + # Select only the subworkflow lint tests + subworkflow_lint_tests = list( + set(key).intersection( + set(nf_core.subworkflows.lint.SubworkflowLint.get_all_subworkflow_lint_tests(is_pipeline=True)) + ) + ) + else: + # If no key is supplied, run the default modules tests + module_lint_tests = list(("module_changes", "module_version")) + subworkflow_lint_tests = list(("subworkflow_changes", "subworkflow_version")) + module_lint_obj.filter_tests_by_key(module_lint_tests) + if subworkflow_lint_obj is not None: + subworkflow_lint_obj.filter_tests_by_key(subworkflow_lint_tests) + + # Set up files for component linting test + module_lint_obj.set_up_pipeline_files() + if subworkflow_lint_obj is not None: + subworkflow_lint_obj.set_up_pipeline_files() # Run the pipeline linting tests try: @@ -632,13 +642,14 @@ def run_linting( log.info("Stopping tests...") return lint_obj, module_lint_obj, subworkflow_lint_obj - # Run the module lint tests - if len(module_lint_obj.all_local_components) > 0: - module_lint_obj.lint_modules(module_lint_obj.all_local_components, local=True) - if len(module_lint_obj.all_remote_components) > 0: - 
module_lint_obj.lint_modules(module_lint_obj.all_remote_components, local=False)
-        # Run the subworkflows lint tests
+    if module_lint_obj is not None:
+        # Run the module lint tests
+        if len(module_lint_obj.all_local_components) > 0:
+            module_lint_obj.lint_modules(module_lint_obj.all_local_components, local=True)
+        if len(module_lint_obj.all_remote_components) > 0:
+            module_lint_obj.lint_modules(module_lint_obj.all_remote_components, local=False)
     if subworkflow_lint_obj is not None:
+        # Run the subworkflows lint tests
         if len(subworkflow_lint_obj.all_local_components) > 0:
             subworkflow_lint_obj.lint_subworkflows(subworkflow_lint_obj.all_local_components, local=True)
         if len(subworkflow_lint_obj.all_remote_components) > 0:
@@ -646,7 +657,8 @@ def run_linting(

     # Print the results
     lint_obj._print_results(show_passed)
-    module_lint_obj._print_results(show_passed, sort_by=sort_by)
+    if module_lint_obj is not None:
+        module_lint_obj._print_results(show_passed, sort_by=sort_by)
     if subworkflow_lint_obj is not None:
         subworkflow_lint_obj._print_results(show_passed, sort_by=sort_by)
     nf_core.pipelines.lint_utils.print_joint_summary(lint_obj, module_lint_obj, subworkflow_lint_obj)
diff --git a/nf_core/pipelines/lint/files_exist.py b/nf_core/pipelines/lint/files_exist.py
index ad0605dcf7..19c2498263 100644
--- a/nf_core/pipelines/lint/files_exist.py
+++ b/nf_core/pipelines/lint/files_exist.py
@@ -1,6 +1,6 @@
 import logging
 from pathlib import Path
-from typing import Dict, List, Tuple, Union
+from typing import Dict, List, Union

 log = logging.getLogger(__name__)

@@ -66,6 +66,7 @@ def files_exist(self) -> Dict[str, List[str]]:
         conf/igenomes.config
         .github/workflows/awstest.yml
         .github/workflows/awsfulltest.yml
+        ro-crate-metadata.json

     Files that *must not* be present, due to being renamed or removed in the template:

@@ -87,6 +88,7 @@ def files_exist(self) -> Dict[str, List[str]]:
         lib/Workflow.groovy
         lib/WorkflowMain.groovy
         lib/WorkflowPIPELINE.groovy
+        lib/nfcore_external_java_deps.jar
         parameters.settings.json
         pipeline_template.yml # saving information in .nf-core.yml
         Singularity
@@ -98,12 +100,6 @@ def files_exist(self) -> Dict[str, List[str]]:

         .travis.yml

-    Files that *must not* be present if a certain entry is present in ``nextflow.config``:
-
-    .. code-block:: bash
-
-        lib/nfcore_external_java_deps.jar # if "nf-validation" is in nextflow.config
-
     .. tip:: You can configure the ``nf-core pipelines lint`` tests to ignore any of these checks by setting
              the ``files_exist`` key as follows in your ``.nf-core.yml`` config file. For example:

@@ -172,9 +168,11 @@ def files_exist(self) -> Dict[str, List[str]]:
         [Path("assets", "multiqc_config.yml")],
         [Path("conf", "base.config")],
         [Path("conf", "igenomes.config")],
+        [Path("conf", "igenomes_ignored.config")],
         [Path(".github", "workflows", "awstest.yml")],
         [Path(".github", "workflows", "awsfulltest.yml")],
         [Path("modules.json")],
+        [Path("ro-crate-metadata.json")],
     ]

     # List of strings. Fails / warns if any of the strings exist.
@@ -198,12 +196,16 @@ def files_exist(self) -> Dict[str, List[str]]:
         Path("parameters.settings.json"),
         Path("pipeline_template.yml"),  # saving information in .nf-core.yml
         Path("Singularity"),
+        Path("lib", "nfcore_external_java_deps.jar"),
     ]
     files_warn_ifexists = [Path(".travis.yml")]
-    files_fail_ifinconfig: List[Tuple[Path, List[Dict[str, str]]]] = [
-        (Path("lib", "nfcore_external_java_deps.jar"), [{"plugins": "nf-validation"}, {"plugins": "nf-schema"}]),
-    ]
+    files_hint = [
+        [
+            [Path("ro-crate-metadata.json")],
+            ". 
Run `nf-core rocrate` to generate this file. Read more about RO-Crates in the [nf-core/tools docs](https://nf-co.re/tools#create-a-ro-crate-metadata-file).", + ], + ] # Remove files that should be ignored according to the linting config ignore_files = self.lint_config.get("files_exist", []) if self.lint_config is not None else [] @@ -231,7 +233,11 @@ def pf(file_path: Union[str, Path]) -> Path: if any([pf(f).is_file() for f in files]): passed.append(f"File found: {self._wrap_quotes(files)}") else: - warned.append(f"File not found: {self._wrap_quotes(files)}") + hint = "" + for file_hint in files_hint: + if file_hint[0] == files: + hint = str(file_hint[1]) + warned.append(f"File not found: {self._wrap_quotes(files)}{hint}") # Files that cause an error if they exist for file in files_fail_ifexists: @@ -241,24 +247,7 @@ def pf(file_path: Union[str, Path]) -> Path: failed.append(f"File must be removed: {self._wrap_quotes(file)}") else: passed.append(f"File not found check: {self._wrap_quotes(file)}") - # Files that cause an error if they exists together with a certain entry in nextflow.config - for file_cond in files_fail_ifinconfig: - if str(file_cond[0]) in ignore_files: - continue - in_config = False - for condition in file_cond[1]: - config_key, config_value = list(condition.items())[0] - if config_key in self.nf_config and config_value in self.nf_config[config_key]: - log.debug(f"Found {config_key} in nextflow.config with value {config_value}") - in_config = True - if pf(file_cond[0]).is_file() and in_config: - failed.append(f"File must be removed: {self._wrap_quotes(file_cond[0])}") - elif pf(file_cond[0]).is_file() and not in_config: - passed.append(f"File found check: {self._wrap_quotes(file_cond[0])}") - elif not pf(file_cond[0]).is_file() and not in_config: - failed.append(f"File not found check: {self._wrap_quotes(file_cond[0])}") - elif not pf(file_cond[0]).is_file() and in_config: - passed.append(f"File not found check: {self._wrap_quotes(file_cond[0])}") + # Files that cause a warning if they exist for file in files_warn_ifexists: if str(file) in ignore_files: diff --git a/nf_core/pipelines/lint/included_configs.py b/nf_core/pipelines/lint/included_configs.py new file mode 100644 index 0000000000..75c4594f41 --- /dev/null +++ b/nf_core/pipelines/lint/included_configs.py @@ -0,0 +1,36 @@ +from pathlib import Path + + +def included_configs(self): + """Check that the pipeline nextflow.config includes the pipeline custom configs. + + If the include line is uncommented, the test passes. + If the include line is commented, the test fails. + If the include line is missing, the test warns. + + Can be skipped by adding the following to the .nf-core.yml file: + lint: + included_configs: False + """ + passed = [] + failed = [] + warned = [] + + config_file = Path(self.wf_path / "nextflow.config") + + with open(config_file) as fh: + config = fh.read() + if ( + f"// includeConfig !System.getenv('NXF_OFFLINE') && params.custom_config_base ? \"${{params.custom_config_base}}/pipeline/{self.pipeline_name}.config\"" + in config + ): + failed.append("Pipeline config does not include custom configs. Please uncomment the includeConfig line.") + elif ( + f"includeConfig !System.getenv('NXF_OFFLINE') && params.custom_config_base ? \"${{params.custom_config_base}}/pipeline/{self.pipeline_name}.config\"" + in config + ): + passed.append("Pipeline config includes custom configs.") + else: + warned.append("Pipeline config does not include custom configs. 
Please add the includeConfig line.")
+
+    return {"passed": passed, "failed": failed, "warned": warned}
diff --git a/nf_core/pipelines/lint/nextflow_config.py b/nf_core/pipelines/lint/nextflow_config.py
index 96323af94d..6ae55501b2 100644
--- a/nf_core/pipelines/lint/nextflow_config.py
+++ b/nf_core/pipelines/lint/nextflow_config.py
@@ -1,3 +1,4 @@
+import ast
 import logging
 import re
 from pathlib import Path
@@ -65,14 +66,6 @@ def nextflow_config(self) -> Dict[str, List[str]]:

         * Should always be set to default value:
           ``https://raw.githubusercontent.com/nf-core/configs/${params.custom_config_version}``

-    * ``params.validationShowHiddenParams``
-
-        * Determines whether boilerplate params are showed by schema. Set to ``false`` by default
-
-    * ``params.validationSchemaIgnoreParams``
-
-        * A comma separated string of inputs the schema validation should ignore.
-
     **The following variables throw warnings if missing:**

     * ``manifest.mainScript``: The filename of the main pipeline script (should be ``main.nf``)
@@ -87,6 +80,9 @@ def nextflow_config(self) -> Dict[str, List[str]]:
     * ``params.nf_required_version``: The old method for specifying the minimum Nextflow version. Replaced by ``manifest.nextflowVersion``
     * ``params.container``: The old method for specifying the dockerhub container address. Replaced by ``process.container``
     * ``igenomesIgnore``: Changed to ``igenomes_ignore``
+    * ``params.max_cpus``: Old method of specifying the maximum number of CPUs a process can request. Replaced by the native Nextflow ``resourceLimits`` directive in config files.
+    * ``params.max_memory``: Old method of specifying the maximum amount of memory a process can request. Replaced by the native Nextflow ``resourceLimits`` directive.
+    * ``params.max_time``: Old method of specifying the maximum time a process can request. Replaced by the native Nextflow ``resourceLimits`` directive.

     .. tip:: The ``snake_case`` convention should now be used when defining pipeline parameters

@@ -151,8 +147,6 @@ def nextflow_config(self) -> Dict[str, List[str]]:
         ["process.time"],
         ["params.outdir"],
         ["params.input"],
-        ["params.validationShowHiddenParams"],
-        ["params.validationSchemaIgnoreParams"],
     ]
     # Throw a warning if these are missing
     config_warn = [
@@ -170,8 +164,54 @@ def nextflow_config(self) -> Dict[str, List[str]]:
         "params.igenomesIgnore",
         "params.name",
         "params.enable_conda",
+        "params.max_cpus",
+        "params.max_memory",
+        "params.max_time",
     ]

+    # Lint for plugins
+    config_plugins = ast.literal_eval(self.nf_config.get("plugins", "[]"))
+    found_plugins = []
+    for plugin in config_plugins:
+        if "@" not in plugin:
+            failed.append(f"Plugin '{plugin}' does not have a pinned version")
+        found_plugins.append(plugin.split("@")[0])
+
+    if "nf-validation" in found_plugins or "nf-schema" in found_plugins:
+        if "nf-validation" in found_plugins and "nf-schema" in found_plugins:
+            failed.append("nextflow.config contains both nf-validation and nf-schema")
+
+        if "nf-schema" in found_plugins:
+            passed.append("Found nf-schema plugin")
+            if self.nf_config.get("validation.help.enabled", "false") == "false":
+                failed.append(
+                    "The help message has not been enabled. 
Set the `validation.help.enabled` configuration option to `true` to enable help messages"
+                )
+                config_fail.extend([["validation.help.enabled"]])
+                config_warn.extend(
+                    [
+                        ["validation.help.beforeText"],
+                        ["validation.help.afterText"],
+                        ["validation.help.command"],
+                        ["validation.summary.beforeText"],
+                        ["validation.summary.afterText"],
+                    ]
+                )
+                config_fail_ifdefined.extend(
+                    [
+                        "params.validationFailUnrecognisedParams",
+                        "params.validationLenientMode",
+                        "params.validationSchemaIgnoreParams",
+                        "params.validationShowHiddenParams",
+                    ]
+                )
+
+        if "nf-validation" in found_plugins:
+            passed.append("Found nf-validation plugin")
+            warned.append(
+                "nf-validation has been detected in the pipeline. Please migrate to nf-schema: https://nextflow-io.github.io/nf-schema/latest/migration_guide/"
+            )
+
     # Remove field that should be ignored according to the linting config
     ignore_configs = self.lint_config.get("nextflow_config", []) if self.lint_config is not None else []

@@ -306,7 +346,7 @@ def nextflow_config(self) -> Dict[str, List[str]]:
             failed.append(f"Config `params.custom_config_base` is not set to `{custom_config_base}`")

     # Check that lines for loading custom profiles exist
-    lines = [
+    old_lines = [
         r"// Load nf-core custom profiles from different Institutions",
         r"try {",
         r'includeConfig "${params.custom_config_base}/nfcore_custom.config"',
         r"} catch (Exception e) {",
         r'System.err.println("WARNING: Could not load nf-core/config profiles: ${params.custom_config_base}/nfcore_custom.config")',
         r"}",
     ]
+    lines = [
+        r"// Load nf-core custom profiles from different Institutions",
+        r'''includeConfig !System.getenv('NXF_OFFLINE') && params.custom_config_base ? "${params.custom_config_base}/nfcore_custom.config" : "/dev/null"''',
+    ]
     path = Path(self.wf_path, "nextflow.config")
     i = 0
     with open(path) as f:
         for line in f:
-            if lines[i] in line:
+            if old_lines[i] in line:
+                i += 1
+                if i == len(old_lines):
+                    break
+            elif lines[i] in line:
                 i += 1
                 if i == len(lines):
                     break
@@ -326,6 +374,12 @@ def nextflow_config(self) -> Dict[str, List[str]]:
                 i = 0
     if i == len(lines):
         passed.append("Lines for loading custom profiles found")
+    elif i == len(old_lines):
+        failed.append(
+            "Old lines for loading custom profiles found. File should contain:\n```groovy\n{}\n```".format(
+                "\n".join(lines)
+            )
+        )
     else:
-        lines[2] = f"\t{lines[2]}"
-        lines[4] = f"\t{lines[4]}"
@@ -411,6 +465,7 @@ def nextflow_config(self) -> Dict[str, List[str]]:
                     f"Config default value incorrect: `{param}` is set as {self._wrap_quotes(schema_default)} in `nextflow_schema.json` but is {self._wrap_quotes(self.nf_config[param])} in `nextflow.config`."
                 )
             else:
+                schema_default = str(schema.schema_defaults[param_name])
                 failed.append(
                     f"Default value from the Nextflow schema `{param} = {self._wrap_quotes(schema_default)}` not found in `nextflow.config`."
                 )
diff --git a/nf_core/pipelines/lint/plugin_includes.py b/nf_core/pipelines/lint/plugin_includes.py
new file mode 100644
index 0000000000..4fc40ae26c
--- /dev/null
+++ b/nf_core/pipelines/lint/plugin_includes.py
@@ -0,0 +1,44 @@
+import ast
+import glob
+import logging
+import re
+from typing import Dict, List
+
+log = logging.getLogger(__name__)
+
+
+def plugin_includes(self) -> Dict[str, List[str]]:
+    """Checks the include statements in all *.nf files for plugin includes
+
+    When nf-schema is used in an nf-core pipeline, the include statements of the plugin
+    functions have to use nf-schema instead of nf-validation and vice versa
+    """
+    config_plugins = [plugin.split("@")[0] for plugin in ast.literal_eval(self.nf_config.get("plugins", "[]"))]
+    validation_plugin = "nf-validation" if "nf-validation" in config_plugins else "nf-schema"
+
+    passed: List[str] = []
+    warned: List[str] = []
+    failed: List[str] = []
+    ignored: List[str] = []
+
+    plugin_include_pattern = re.compile(r"^include\s*{[^}]+}\s*from\s*[\"']plugin/([^\"']+)[\"']\s*$", re.MULTILINE)
+    workflow_files = [
+        file for file in glob.glob(f"{self.wf_path}/**/*.nf", recursive=True) if not file.startswith("./modules/")
+    ]
+    test_passed = True
+    for file in workflow_files:
+        with open(file) as of:
+            plugin_includes = re.findall(plugin_include_pattern, of.read())
+            for include in plugin_includes:
+                if include not in ["nf-validation", "nf-schema"]:
+                    continue
+                if include != validation_plugin:
+                    test_passed = False
+                    failed.append(
+                        f"Found a `{include}` plugin import in `{file[2:]}`, but `{validation_plugin}` was used in `nextflow.config`"
+                    )
+
+    if test_passed:
+        passed.append("No wrong validation plugin imports have been found")
+
+    return {"passed": passed, "warned": warned, "failed": failed, "ignored": ignored}
diff --git a/nf_core/pipelines/lint/readme.py b/nf_core/pipelines/lint/readme.py
index 4c16243690..bdfad5200f 100644
--- a/nf_core/pipelines/lint/readme.py
+++ b/nf_core/pipelines/lint/readme.py
@@ -36,7 +36,7 @@ def readme(self):

     if "nextflow_badge" not in ignore_configs:
         # Check that there is a readme badge showing the minimum required version of Nextflow
-        # [![Nextflow](https://img.shields.io/badge/nextflow%20DSL2-%E2%89%A523.04.0-23aa62.svg)](https://www.nextflow.io/)
+        # [![Nextflow](https://img.shields.io/badge/nextflow%20DSL2-%E2%89%A524.04.2-23aa62.svg)](https://www.nextflow.io/)
         # and that it has the correct version
         nf_badge_re = r"\[!\[Nextflow\]\(https://img\.shields\.io/badge/nextflow%20DSL2-!?(?:%E2%89%A5|%3E%3D)([\d\.]+)-23aa62\.svg\)\]\(https://www\.nextflow\.io/\)"
         match = re.search(nf_badge_re, content)
diff --git a/nf_core/pipelines/lint/schema_description.py b/nf_core/pipelines/lint/schema_description.py
index d617e40949..b586cc5242 100644
--- a/nf_core/pipelines/lint/schema_description.py
+++ b/nf_core/pipelines/lint/schema_description.py
@@ -36,8 +36,9 @@ def schema_description(self):
         warned.append(f"Ungrouped param in schema: `{up}`")

     # Iterate over groups and add warning for parameters without a description
-    for group_key in self.schema_obj.schema["definitions"].keys():
-        group = self.schema_obj.schema["definitions"][group_key]
+    defs_notation = self.schema_obj.defs_notation
+    for group_key in self.schema_obj.schema[defs_notation].keys():
+        group = self.schema_obj.schema[defs_notation][group_key]
         for param_key, param in group["properties"].items():
             if param_key in ignore_params:
                 ignored.append(f"Ignoring description check for param in schema: `{param_key}`")
diff --git a/nf_core/pipelines/lint/schema_lint.py b/nf_core/pipelines/lint/schema_lint.py
index 6786c5012d..4007bf8fe5 100644
--- a/nf_core/pipelines/lint/schema_lint.py
+++ b/nf_core/pipelines/lint/schema_lint.py
@@ -16,26 +16,26 @@ def schema_lint(self):
     The lint test checks the schema for the following:

     * Schema should be a valid JSON file
-    * Schema should adhere to `JSONSchema <https://json-schema.org/>`_, Draft 7.
+    * Schema should adhere to `JSONSchema <https://json-schema.org/>`_, Draft 7 or Draft 2020-12.
     * Parameters can be described in two places:

         * As ``properties`` in the top-level schema object
-        * As ``properties`` within subschemas listed in a top-level ``definitions`` objects
+        * As ``properties`` within subschemas listed in a top-level ``definitions`` (draft 7) or ``$defs`` (draft 2020-12) object

     * The schema must describe at least one parameter
     * There must be no duplicate parameter IDs across the schema and definition subschema
-    * All subschema in ``definitions`` must be referenced in the top-level ``allOf`` key
+    * All subschema in ``definitions`` or ``$defs`` must be referenced in the top-level ``allOf`` key
     * The top-level ``allOf`` key must not describe any non-existent definitions
     * Default parameters in the schema must be valid
     * Core top-level schema attributes should exist and be set as follows:

-        * ``$schema``: ``https://json-schema.org/draft-07/schema``
+        * ``$schema``: ``https://json-schema.org/draft-07/schema`` or ``https://json-schema.org/draft/2020-12/schema``
         * ``$id``: URL to the raw schema file, eg. ``https://raw.githubusercontent.com/YOURPIPELINE/master/nextflow_schema.json``
         * ``title``: ``YOURPIPELINE pipeline parameters``
         * ``description``: The pipeline config ``manifest.description``

     * That the ``input`` property is defined and has a mimetype. A list of common mimetypes can be found `here <https://developer.mozilla.org/en-US/docs/Web/HTTP/Basics_of_HTTP/MIME_types/Common_types>`_.

-    For example, an *extremely* minimal schema could look like this:
+    For example, an *extremely* minimal schema could look like this (draft 7):

     .. code-block:: json

@@ -57,6 +57,28 @@ def schema_lint(self):
            "allOf": [{"$ref": "#/definitions/my_first_group"}]
         }

+    Or this (draft 2020-12):
+
+    .. code-block:: json
+
+        {
+           "$schema": "https://json-schema.org/draft/2020-12/schema",
+           "$id": "https://raw.githubusercontent.com/YOURPIPELINE/master/nextflow_schema.json",
+           "title": "YOURPIPELINE pipeline parameters",
+           "description": "This pipeline is for testing",
+           "properties": {
+               "first_param": { "type": "string" }
+           },
+           "$defs": {
+               "my_first_group": {
+                   "properties": {
+                       "second_param": { "type": "string" }
+                   }
+               }
+           },
+           "allOf": [{"$ref": "#/$defs/my_first_group"}]
+        }
+
     .. tip:: You can check your pipeline schema without having to run the entire pipeline lint
              by running ``nf-core pipelines schema lint`` instead of ``nf-core pipelines lint``
     """
diff --git a/nf_core/pipelines/lint/template_strings.py b/nf_core/pipelines/lint/template_strings.py
index 37a1f64daf..11c5e82516 100644
--- a/nf_core/pipelines/lint/template_strings.py
+++ b/nf_core/pipelines/lint/template_strings.py
@@ -5,7 +5,7 @@ def template_strings(self):
     """Check for template placeholders.

-    The ``nf-core create`` pipeline template uses
+    The ``nf-core pipelines create`` pipeline template uses
     `Jinja <https://jinja.palletsprojects.com/>`_ behind the scenes.
     This lint test fails if any Jinja template variables such as
diff --git a/nf_core/pipelines/lint_utils.py b/nf_core/pipelines/lint_utils.py
index ccab76295f..b4c56c6007 100644
--- a/nf_core/pipelines/lint_utils.py
+++ b/nf_core/pipelines/lint_utils.py
@@ -2,9 +2,10 @@
 import logging
 import subprocess
 from pathlib import Path
-from typing import List
+from typing import List, Union

 import rich
+import yaml
 from rich.console import Console
 from rich.table import Table

@@ -22,15 +23,22 @@ def print_joint_summary(lint_obj, module_lint_obj, subworkflow_lint_obj):
     swf_passed = 0
     swf_warned = 0
     swf_failed = 0
+    module_passed = 0
+    module_warned = 0
+    module_failed = 0
     if subworkflow_lint_obj is not None:
         swf_passed = len(subworkflow_lint_obj.passed)
         swf_warned = len(subworkflow_lint_obj.warned)
         swf_failed = len(subworkflow_lint_obj.failed)
-    nbr_passed = len(lint_obj.passed) + len(module_lint_obj.passed) + swf_passed
+    if module_lint_obj is not None:
+        module_passed = len(module_lint_obj.passed)
+        module_warned = len(module_lint_obj.warned)
+        module_failed = len(module_lint_obj.failed)
+    nbr_passed = len(lint_obj.passed) + module_passed + swf_passed
     nbr_ignored = len(lint_obj.ignored)
     nbr_fixed = len(lint_obj.fixed)
-    nbr_warned = len(lint_obj.warned) + len(module_lint_obj.warned) + swf_warned
-    nbr_failed = len(lint_obj.failed) + len(module_lint_obj.failed) + swf_failed
+    nbr_warned = len(lint_obj.warned) + module_warned + swf_warned
+    nbr_failed = len(lint_obj.failed) + module_failed + swf_failed

     summary_colour = "red" if nbr_failed > 0 else "green"
     table = Table(box=rich.box.ROUNDED, style=summary_colour)
@@ -62,7 +70,7 @@ def print_fixes(lint_obj):
     )


-def run_prettier_on_file(file):
+def run_prettier_on_file(file: Union[Path, str, List[str]]) -> None:
     """Run the pre-commit hook prettier on a file.

     Args:
@@ -73,12 +81,15 @@ def run_prettier_on_file(file):
     """
     nf_core_pre_commit_config = Path(nf_core.__file__).parent / ".pre-commit-prettier-config.yaml"
+    args = ["pre-commit", "run", "--config", str(nf_core_pre_commit_config), "prettier"]
+    if isinstance(file, List):
+        args.extend(["--files", *file])
+    else:
+        args.extend(["--files", str(file)])
+
     try:
-        subprocess.run(
-            ["pre-commit", "run", "--config", nf_core_pre_commit_config, "prettier", "--files", file],
-            capture_output=True,
-            check=True,
-        )
+        proc = subprocess.run(args, capture_output=True, check=True)
+        log.debug(proc.stdout.decode())
     except subprocess.CalledProcessError as e:
         if ": SyntaxError: " in e.stdout.decode():
             log.critical(f"Can't format {file} because it has a syntax error.\n{e.stdout.decode()}")
@@ -104,6 +115,18 @@ def dump_json_with_prettier(file_name, file_content):
     run_prettier_on_file(file_name)


+def dump_yaml_with_prettier(file_name: Union[Path, str], file_content: dict) -> None:
+    """Dump a YAML file and run prettier on it.
+
+    Args:
+        file_name (Path | str): A file identifier as a string or pathlib.Path.
+        file_content (dict): Content to dump into the YAML file
+    """
+    with open(file_name, "w") as fh:
+        yaml.safe_dump(file_content, fh)
+    run_prettier_on_file(file_name)
+
+
 def ignore_file(lint_name: str, file_path: Path, dir_path: Path) -> List[List[str]]:
     """Ignore a file and add the result to the ignored list.
    Return the passed, failed, ignored and ignore_configs lists."""
diff --git a/nf_core/pipelines/rocrate.py b/nf_core/pipelines/rocrate.py
new file mode 100644
index 0000000000..de00189a2c
--- /dev/null
+++ b/nf_core/pipelines/rocrate.py
@@ -0,0 +1,400 @@
+#!/usr/bin/env python
+"""Code to deal with pipeline RO (Research Object) Crates"""
+
+import logging
+import os
+import re
+import sys
+from datetime import datetime
+from pathlib import Path
+from typing import Dict, List, Optional, Set, Union
+
+import requests
+import rocrate.rocrate
+from git import GitCommandError, InvalidGitRepositoryError
+from repo2rocrate.nextflow import NextflowCrateBuilder
+from rich.progress import BarColumn, Progress
+from rocrate.model.person import Person
+from rocrate.rocrate import ROCrate as BaseROCrate
+
+from nf_core.pipelines.schema import PipelineSchema
+from nf_core.utils import Pipeline
+
+log = logging.getLogger(__name__)
+
+
+class CustomNextflowCrateBuilder(NextflowCrateBuilder):
+    DATA_ENTITIES = NextflowCrateBuilder.DATA_ENTITIES + [
+        ("docs/usage.md", "File", "Usage documentation"),
+        ("docs/output.md", "File", "Output documentation"),
+        ("subworkflows/local", "Dataset", "Pipeline-specific subworkflows"),
+        ("subworkflows/nf-core", "Dataset", "nf-core subworkflows"),
+        (".nf-core.yml", "File", "nf-core configuration file, configuring template features and linting rules"),
+        (".pre-commit-config.yaml", "File", "Configuration file for pre-commit hooks"),
+        (".prettierignore", "File", "Ignore file for prettier"),
+        (".prettierrc", "File", "Configuration file for prettier"),
+    ]
+
+
+def custom_make_crate(
+    root: Path,
+    workflow: Optional[Path] = None,
+    repo_url: Optional[str] = None,
+    wf_name: Optional[str] = None,
+    wf_version: Optional[str] = None,
+    lang_version: Optional[str] = None,
+    ci_workflow: Optional[str] = "ci.yml",
+    diagram: Optional[Path] = None,
+) -> BaseROCrate:
+    builder = CustomNextflowCrateBuilder(root, repo_url=repo_url)
+
+    return builder.build(
+        workflow,
+        wf_name=wf_name,
+        wf_version=wf_version,
+        lang_version=lang_version,
+        license=None,
+        ci_workflow=ci_workflow,
+        diagram=diagram,
+    )
+
+
+class ROCrate:
+    """
+    Class to generate an RO Crate for a pipeline
+
+    """
+
+    def __init__(self, pipeline_dir: Path, version="") -> None:
+        """
+        Initialise the ROCrate object
+
+        Args:
+            pipeline_dir (Path): Path to the pipeline directory
+            version (str): Version of the pipeline to checkout
+        """
+        from nf_core.utils import is_pipeline_directory, setup_requests_cachedir
+
+        is_pipeline_directory(pipeline_dir)
+        self.pipeline_dir = pipeline_dir
+        self.version: str = version
+        self.crate: rocrate.rocrate.ROCrate
+        self.pipeline_obj = Pipeline(self.pipeline_dir)
+        self.pipeline_obj._load()
+        self.pipeline_obj.schema_obj = PipelineSchema()
+        # Assume we're in a pipeline dir root if schema path not set
+        self.pipeline_obj.schema_obj.get_schema_path(self.pipeline_dir)
+        self.pipeline_obj.schema_obj.load_schema()
+
+        setup_requests_cachedir()
+
+    def create_rocrate(
+        self, outdir: Path, json_path: Union[None, Path] = None, zip_path: Union[None, Path] = None
+    ) -> None:
+        """
+        Create an RO Crate for a pipeline
+
+        Args:
+            outdir (Path): Path to the output directory
+            json_path (Path): Path to the metadata file
+            zip_path (Path): Path to the zip file
+
+        """
+        # Set input paths
+        try:
+            self.set_crate_paths(outdir)
+        except OSError as e:
+            log.error(e)
+            sys.exit(1)
+
+        # Change to the pipeline directory, because the RO Crate doesn't handle relative paths well
+
+        # Check that the checkout pipeline version is the same as the requested version
+        if self.version != "":
+            if self.version != self.pipeline_obj.nf_config.get("manifest.version"):
+                # using git checkout to get the requested version
+                log.info(f"Checking out pipeline version {self.version}")
+                if self.pipeline_obj.repo is None:
+                    log.error(f"Pipeline repository not found in {self.pipeline_dir}")
+                    sys.exit(1)
+                try:
+                    self.pipeline_obj.repo.git.checkout(self.version)
+                    self.pipeline_obj = Pipeline(self.pipeline_dir)
+                    self.pipeline_obj._load()
+                except InvalidGitRepositoryError:
+                    log.error(f"Could not find a git repository in {self.pipeline_dir}")
+                    sys.exit(1)
+                except GitCommandError:
+                    log.error(f"Could not checkout version {self.version}")
+                    sys.exit(1)
+        self.version = self.pipeline_obj.nf_config.get("manifest.version", "")
+        self.make_workflow_rocrate()
+
+        # Save just the JSON metadata file
+        if json_path is not None:
+            if json_path.name != "ro-crate-metadata.json":
+                json_path = json_path / "ro-crate-metadata.json"
+
+            log.info(f"Saving metadata file to '{json_path}'")
+            self.crate.metadata.write(json_path)
+
+        # Save the whole crate zip file
+        if zip_path is not None:
+            if zip_path.name != "ro-crate.crate.zip":
+                zip_path = zip_path / "ro-crate.crate.zip"
+            log.info(f"Saving zip file '{zip_path}'")
+            self.crate.write_zip(zip_path)
+
+    def make_workflow_rocrate(self) -> None:
+        """
+        Create an RO Crate for a pipeline
+        """
+        if self.pipeline_obj is None:
+            raise ValueError("Pipeline object not loaded")
+
+        diagram: Optional[Path] = None
+        # find files (metro|tube)_?(map)?.png in the pipeline directory or docs/ using pathlib
+        pattern = re.compile(r".*?(metro|tube|subway)_(map).*?\.png", re.IGNORECASE)
+        for file in self.pipeline_dir.rglob("*.png"):
+            if pattern.match(file.name):
+                log.debug(f"Found diagram: {file}")
+                diagram = file.relative_to(self.pipeline_dir)
+                break
+
+        # Create the RO Crate object
+
+        self.crate = custom_make_crate(
+            self.pipeline_dir,
+            self.pipeline_dir / "main.nf",
+            self.pipeline_obj.nf_config.get("manifest.homePage", ""),
+            self.pipeline_obj.nf_config.get("manifest.name", ""),
+            self.pipeline_obj.nf_config.get("manifest.version", ""),
+            self.pipeline_obj.nf_config.get("manifest.nextflowVersion", ""),
+            diagram=diagram,
+        )
+
+        # add readme as description
+        readme = Path("README.md")
+
+        try:
+            self.crate.description = readme.read_text()
+        except FileNotFoundError:
+            log.error(f"Could not find README.md in {self.pipeline_dir}")
+        # get license from LICENSE file
+        license_file = Path("LICENSE")
+        try:
+            license = license_file.read_text()
+            if license.startswith("MIT"):
+                self.crate.license = "MIT"
+            else:
+                # prompt for license
+                log.info("Could not determine license from LICENSE file")
+                self.crate.license = input("Please enter the license for this pipeline: ")
+        except FileNotFoundError:
+            log.error(f"Could not find LICENSE file in {self.pipeline_dir}")
+
+        self.crate.add_jsonld(
+            {"@id": "https://nf-co.re/", "@type": "Organization", "name": "nf-core", "url": "https://nf-co.re/"}
+        )
+
+        # Set metadata for main entity file
+        self.set_main_entity("main.nf")
+
+    def set_main_entity(self, main_entity_filename: str):
+        """
+        Set the main.nf as the main entity of the crate and add necessary metadata
+        """
+        if self.crate.mainEntity is None:
+            raise ValueError("Main entity not set")
+
+        self.crate.mainEntity.append_to(
+            "dct:conformsTo", "https://bioschemas.org/profiles/ComputationalWorkflow/1.0-RELEASE/", compact=True
+        )
+        # add dateCreated and dateModified, based on the current date
+        self.crate.mainEntity.append_to("dateCreated", self.crate.root_dataset.get("dateCreated", ""), compact=True)
+        self.crate.mainEntity.append_to(
+            "dateModified", str(datetime.now().strftime("%Y-%m-%dT%H:%M:%SZ")), compact=True
+        )
+        self.crate.mainEntity.append_to("sdPublisher", {"@id": "https://nf-co.re/"}, compact=True)
+        if self.version.endswith("dev"):
+            url = "dev"
+        else:
+            url = self.version
+        self.crate.mainEntity.append_to(
+            "url", f"https://nf-co.re/{self.crate.name.replace('nf-core/','')}/{url}/", compact=True
+        )
+        self.crate.mainEntity.append_to("version", self.version, compact=True)
+        if self.pipeline_obj.schema_obj is not None:
+            log.debug("input value")
+
+            schema_input = self.pipeline_obj.schema_obj.schema["definitions"]["input_output_options"]["properties"][
+                "input"
+            ]
+            input_value: Dict[str, Union[str, List[str], bool]] = {
+                "@id": "#input",
+                "@type": ["FormalParameter"],
+                "default": schema_input.get("default", ""),
+                "encodingFormat": schema_input.get("mimetype", ""),
+                "valueRequired": "input"
+                in self.pipeline_obj.schema_obj.schema["definitions"]["input_output_options"]["required"],
+                "dct:conformsTo": "https://bioschemas.org/types/FormalParameter/1.0-RELEASE",
+            }
+            self.crate.add_jsonld(input_value)
+            self.crate.mainEntity.append_to(
+                "input",
+                {"@id": "#input"},
+            )
+
+        # get keywords from nf-core website
+        remote_workflows = requests.get("https://nf-co.re/pipelines.json").json()["remote_workflows"]
+        # go through all remote workflows and find the one that matches the pipeline name
+        topics = ["nf-core", "nextflow"]
+        for remote_wf in remote_workflows:
+            assert self.pipeline_obj.pipeline_name is not None  # mypy
+            if remote_wf["name"] == self.pipeline_obj.pipeline_name.replace("nf-core/", ""):
+                topics = topics + remote_wf["topics"]
+                break
+
+        log.debug(f"Adding topics: {topics}")
+        self.crate.mainEntity.append_to("keywords", topics)
+
+        self.add_main_authors(self.crate.mainEntity)
+
+        self.crate.mainEntity.append_to("license", self.crate.license)
+        self.crate.mainEntity.append_to("name", self.crate.name)
+
+        if "dev" in self.version:
+            self.crate.creativeWorkStatus = "InProgress"
+        else:
+            self.crate.creativeWorkStatus = "Stable"
+        if self.pipeline_obj.repo is None:
+            log.error(f"Pipeline repository not found in {self.pipeline_dir}")
+        else:
+            tags = self.pipeline_obj.repo.tags
+            if tags:
+                # get the tag for this version
+                for tag in tags:
+                    if tag.commit.hexsha == self.pipeline_obj.repo.head.commit.hexsha:
+                        self.crate.mainEntity.append_to(
+                            "dateCreated",
+                            tag.commit.committed_datetime.strftime("%Y-%m-%dT%H:%M:%SZ"),
+                            compact=True,
+                        )
+
+    def add_main_authors(self, wf_file: rocrate.model.entity.Entity) -> None:
+        """
+        Add workflow authors to the crate
+        """
+        # add author entity to crate
+
+        try:
+            authors = self.pipeline_obj.nf_config["manifest.author"].split(",")
+            # remove spaces
+            authors = [a.strip() for a in authors]
+            # add manifest authors as maintainer to crate
+
+        except KeyError:
+            log.error("No author field found in manifest of nextflow.config")
+            return
+        # look at git contributors for author names
+        try:
+            git_contributors: Set[str] = set()
+            assert self.pipeline_obj.repo is not None  # mypy
+            commits_touching_path = list(self.pipeline_obj.repo.iter_commits(paths="main.nf"))
+
+            for commit in commits_touching_path:
+                if commit.author.name is not None:
+                    git_contributors.add(commit.author.name)
+            # exclude bots
+            contributors = {c for c in git_contributors if not c.endswith("bot") and c != "Travis CI User"}
c.endswith("bot") and c != "Travis CI User"} + + log.debug(f"Found {len(contributors)} git authors") + + progress_bar = Progress( + "[bold blue]{task.description}", + BarColumn(bar_width=None), + "[magenta]{task.completed} of {task.total}[reset] Β» [bold yellow]{task.fields[test_name]}", + transient=True, + disable=os.environ.get("HIDE_PROGRESS", None) is not None, + ) + with progress_bar: + bump_progress = progress_bar.add_task( + "Searching for author names on GitHub", total=len(contributors), test_name="" + ) + + for git_author in contributors: + progress_bar.update(bump_progress, advance=1, test_name=git_author) + git_author = ( + requests.get(f"https://api.github.com/users/{git_author}").json().get("name", git_author) + ) + if git_author is None: + log.debug(f"Could not find name for {git_author}") + continue + + except AttributeError: + log.debug("Could not find git contributors") + + # remove usernames (just keep names with spaces) + named_contributors = {c for c in contributors if " " in c} + + for author in named_contributors: + log.debug(f"Adding author: {author}") + assert self.pipeline_obj.repo is not None # mypy + # get email from git log + email = self.pipeline_obj.repo.git.log(f"--author={author}", "--pretty=format:%ae", "-1") + orcid = get_orcid(author) + author_entitity = self.crate.add( + Person( + self.crate, orcid if orcid is not None else "#" + email, properties={"name": author, "email": email} + ) + ) + wf_file.append_to("creator", author_entitity) + if author in authors: + wf_file.append_to("maintainer", author_entitity) + + def set_crate_paths(self, path: Path) -> None: + """Given a pipeline name, directory, or path, set wf_crate_filename""" + + if path.is_dir(): + self.pipeline_dir = path + # wf_crate_filename = path / "ro-crate-metadata.json" + elif path.is_file(): + self.pipeline_dir = path.parent + # wf_crate_filename = path + + # Check that the schema file exists + if self.pipeline_dir is None: + raise OSError(f"Could not find pipeline '{path}'") + + +def get_orcid(name: str) -> Optional[str]: + """ + Get the ORCID for a given name + + Args: + name (str): Name of the author + + Returns: + str: ORCID URI or None + """ + base_url = "https://pub.orcid.org/v3.0/search/" + headers = { + "Accept": "application/json", + } + params = {"q": f'family-name:"{name.split()[-1]}" AND given-names:"{name.split()[0]}"'} + response = requests.get(base_url, params=params, headers=headers) + + if response.status_code == 200: + json_response = response.json() + if json_response.get("num-found") == 1: + orcid_uri = json_response.get("result")[0].get("orcid-identifier", {}).get("uri") + log.info(f"Using found ORCID for {name}. Please double-check: {orcid_uri}") + return orcid_uri + else: + log.debug(f"No exact ORCID found for {name}. See {response.url}") + return None + else: + log.info(f"API request to ORCID unsuccessful. 
+        return None
diff --git a/nf_core/pipelines/schema.py b/nf_core/pipelines/schema.py
index 7f562bff38..cde4af3fd5 100644
--- a/nf_core/pipelines/schema.py
+++ b/nf_core/pipelines/schema.py
@@ -32,7 +32,7 @@ def __init__(self):
         self.schema = {}
         self.pipeline_dir = ""
-        self.schema_filename = ""
+        self._schema_filename = ""
         self.schema_defaults = {}
         self.schema_types = {}
         self.schema_params = {}
@@ -43,9 +43,78 @@ def __init__(self):
         self.schema_from_scratch = False
         self.no_prompts = False
         self.web_only = False
-        self.web_schema_build_url = "https://nf-co.re/pipeline_schema_builder"
+        self.web_schema_build_url = "https://oldsite.nf-co.re/pipeline_schema_builder"
         self.web_schema_build_web_url = None
         self.web_schema_build_api_url = None
+        self.validation_plugin = None
+        self.schema_draft = None
+        self.defs_notation = None
+        self.ignored_params = []
+
+    # Update the validation plugin code every time the schema gets changed
+    def set_schema_filename(self, schema: str) -> None:
+        self._schema_filename = schema
+        self._update_validation_plugin_from_config()
+
+    def get_schema_filename(self) -> str:
+        return self._schema_filename
+
+    def del_schema_filename(self) -> None:
+        del self._schema_filename
+
+    schema_filename = property(get_schema_filename, set_schema_filename, del_schema_filename)
+
+    def _update_validation_plugin_from_config(self) -> None:
+        plugin = "nf-schema"
+        if self.schema_filename:
+            conf = nf_core.utils.fetch_wf_config(Path(self.schema_filename).parent)
+        else:
+            conf = nf_core.utils.fetch_wf_config(Path(self.pipeline_dir))
+
+        plugins = str(conf.get("plugins", "")).strip("'\"").strip(" ").split(",")
+        plugin_found = False
+        for plugin_instance in plugins:
+            if "nf-schema" in plugin_instance:
+                plugin = "nf-schema"
+                plugin_found = True
+                break
+            elif "nf-validation" in plugin_instance:
+                plugin = "nf-validation"
+                plugin_found = True
+                break
+
+        if not plugin_found:
+            log.info(
+                "Could not find nf-schema or nf-validation in the pipeline config. Defaulting to nf-schema notation for the JSON schema."
+            )
+
+        self.validation_plugin = plugin
+        # Previous versions of nf-schema used "defs", but it's advised to use "$defs"
+        if plugin == "nf-schema":
+            self.defs_notation = "$defs"
+            ignored_params = [
+                conf.get("validation.help.shortParameter", "help"),
+                conf.get("validation.help.fullParameter", "helpFull"),
+                conf.get("validation.help.showHiddenParameter", "showHidden"),
+            ]  # Help parameter should be ignored by default
+            ignored_params_config_str = conf.get("validation.defaultIgnoreParams", "")
+            ignored_params_config = [
+                item.strip().strip("'") for item in ignored_params_config_str[1:-1].split(",")
+            ]  # Extract list elements and remove whitespace
+
+            if len(ignored_params_config) > 0:
+                log.debug(f"Ignoring parameters from config: {ignored_params_config}")
+                ignored_params.extend(ignored_params_config)
+            self.ignored_params = ignored_params
+            log.debug(f"Ignoring parameters: {self.ignored_params}")
+            self.schema_draft = "https://json-schema.org/draft/2020-12/schema"
+
+        else:
+            self.defs_notation = "definitions"
+            self.schema_draft = "https://json-schema.org/draft-07/schema"
+            self.get_wf_params()
+            self.ignored_params = self.pipeline_params.get("validationSchemaIgnoreParams", "").strip("\"'").split(",")
+            self.ignored_params.append("validationSchemaIgnoreParams")

     def get_schema_path(
         self, path: Union[str, Path], local_only: bool = False, revision: Union[str, None] = None
@@ -116,6 +185,8 @@ def load_schema(self):
             self.schema = json.load(fh)
         self.schema_defaults = {}
         self.schema_params = {}
+        if "$schema" not in self.schema:
+            raise AssertionError("Schema missing top-level `$schema` attribute")
         log.debug(f"JSON file loaded: {self.schema_filename}")

     def sanitise_param_default(self, param):
@@ -168,10 +239,11 @@ def get_schema_defaults(self) -> None:
                 if param["default"] is not None:
                     self.schema_defaults[p_key] = param["default"]

+        # TODO add support for nested parameters
         # Grouped schema properties in subschema definitions
-        for defn_name, definition in self.schema.get("definitions", {}).items():
+        for defn_name, definition in self.schema.get(self.defs_notation, {}).items():
             for p_key, param in definition.get("properties", {}).items():
-                self.schema_params[p_key] = ("definitions", defn_name, "properties", p_key)
+                self.schema_params[p_key] = (self.defs_notation, defn_name, "properties", p_key)
                 if "default" in param:
                     param = self.sanitise_param_default(param)
                     if param["default"] is not None:
@@ -182,7 +254,7 @@ def get_schema_types(self) -> None:
         for name, param in self.schema.get("properties", {}).items():
             if "type" in param:
                 self.schema_types[name] = param["type"]
-        for _, definition in self.schema.get("definitions", {}).items():
+        for _, definition in self.schema.get(self.defs_notation, {}).items():
             for name, param in definition.get("properties", {}).items():
                 if "type" in param:
                     self.schema_types[name] = param["type"]
@@ -191,7 +263,7 @@ def save_schema(self, suppress_logging=False):
         """Save a pipeline schema to a file"""
         # Write results to a JSON file
         num_params = len(self.schema.get("properties", {}))
-        num_params += sum(len(d.get("properties", {})) for d in self.schema.get("definitions", {}).values())
+        num_params += sum(len(d.get("properties", {})) for d in self.schema.get(self.defs_notation, {}).values())
         if not suppress_logging:
             log.info(f"Writing schema with {num_params} params: '{self.schema_filename}'")
         dump_json_with_prettier(self.schema_filename, self.schema)
@@ -248,13 +320,14 @@ def validate_default_params(self):
         if self.schema is None:
             log.error("[red][✗] Pipeline schema not found")
schema not found") try: + # TODO add support for nested parameters # Make copy of schema and remove required flags schema_no_required = copy.deepcopy(self.schema) if "required" in schema_no_required: schema_no_required.pop("required") - for group_key, group in schema_no_required.get("definitions", {}).items(): + for group_key, group in schema_no_required.get(self.defs_notation, {}).items(): if "required" in group: - schema_no_required["definitions"][group_key].pop("required") + schema_no_required[self.defs_notation][group_key].pop("required") jsonschema.validate(self.schema_defaults, schema_no_required) except jsonschema.exceptions.ValidationError as e: raise AssertionError(f"Default parameters are invalid: {e.message}") @@ -269,17 +342,11 @@ def validate_default_params(self): if self.pipeline_params == {}: self.get_wf_params() - # Collect parameters to ignore - if "validationSchemaIgnoreParams" in self.pipeline_params: - params_ignore = self.pipeline_params.get("validationSchemaIgnoreParams", "").strip("\"'").split(",") - else: - params_ignore = [] - # Go over group keys - for group_key, group in schema_no_required.get("definitions", {}).items(): + for group_key, group in schema_no_required.get(self.defs_notation, {}).items(): group_properties = group.get("properties") for param in group_properties: - if param in params_ignore: + if param in self.ignored_params: continue if param in self.pipeline_params: self.validate_config_default_parameter(param, group_properties[param], self.pipeline_params[param]) @@ -292,7 +359,7 @@ def validate_default_params(self): ungrouped_properties = self.schema.get("properties") if ungrouped_properties: for param in ungrouped_properties: - if param in params_ignore: + if param in self.ignored_params: continue if param in self.pipeline_params: self.validate_config_default_parameter( @@ -312,7 +379,7 @@ def validate_config_default_parameter(self, param, schema_param, config_default) # If we have a default in the schema, check it matches the config if "default" in schema_param and ( (schema_param["type"] == "boolean" and str(config_default).lower() != str(schema_param["default"]).lower()) - and (str(schema_param["default"]) != str(config_default).strip('"').strip("'")) + and (str(schema_param["default"]) != str(config_default).strip("'\"")) ): # Check that we are not deferring the execution of this parameter in the schema default with squiggly brakcets if schema_param["type"] != "string" or "{" not in schema_param["default"]: @@ -359,39 +426,66 @@ def validate_schema(self, schema=None): """ if schema is None: schema = self.schema - try: - jsonschema.Draft7Validator.check_schema(schema) - log.debug("JSON Schema Draft7 validated") - except jsonschema.exceptions.SchemaError as e: - raise AssertionError(f"Schema does not validate as Draft 7 JSON Schema:\n {e}") + + if "$schema" not in schema: + raise AssertionError("Schema missing top-level `$schema` attribute") + schema_draft = schema["$schema"] + if self.schema_draft != schema_draft: + raise AssertionError(f"Schema is using the wrong draft: {schema_draft}, should be {self.schema_draft}") + if self.schema_draft == "https://json-schema.org/draft-07/schema": + try: + jsonschema.Draft7Validator.check_schema(schema) + log.debug("JSON Schema Draft7 validated") + except jsonschema.exceptions.SchemaError as e: + raise AssertionError(f"Schema does not validate as Draft 7 JSON Schema:\n {e}") + elif self.schema_draft == "https://json-schema.org/draft/2020-12/schema": + try: + 
+                jsonschema.Draft202012Validator.check_schema(schema)
+                log.debug("JSON Schema Draft2020-12 validated")
+            except jsonschema.exceptions.SchemaError as e:
+                raise AssertionError(f"Schema does not validate as Draft 2020-12 JSON Schema:\n {e}")
+        else:
+            raise AssertionError(
+                f"Schema `$schema` should be `https://json-schema.org/draft/2020-12/schema` or `https://json-schema.org/draft-07/schema` \n Found `{schema_draft}`"
+            )
 
         param_keys = list(schema.get("properties", {}).keys())
         num_params = len(param_keys)
-        for d_key, d_schema in schema.get("definitions", {}).items():
+
+        # Add a small check for older nf-schema JSON schemas
+        if "defs" in schema:
+            raise AssertionError(
                f'Using "defs" for schema definitions is not supported. Please use "{self.defs_notation}" instead'
+            )
+
+        for d_key, d_schema in schema.get(self.defs_notation, {}).items():
             # Check that this definition is mentioned in allOf
             if "allOf" not in schema:
                 raise AssertionError("Schema has definitions, but no allOf key")
             in_allOf = False
             for allOf in schema.get("allOf", []):
-                if allOf["$ref"] == f"#/definitions/{d_key}":
+                if allOf["$ref"] == f"#/{self.defs_notation}/{d_key}":
                     in_allOf = True
             if not in_allOf:
-                raise AssertionError(f"Definition subschema `{d_key}` not included in schema `allOf`")
+                raise AssertionError(
+                    f"Definition subschema `#/{self.defs_notation}/{d_key}` not included in schema `allOf`"
+                )
 
+            # TODO add support for nested parameters
             for d_param_id in d_schema.get("properties", {}):
                 # Check that we don't have any duplicate parameter IDs in different definitions
                 if d_param_id in param_keys:
-                    raise AssertionError(f"Duplicate parameter found in schema `definitions`: `{d_param_id}`")
+                    raise AssertionError(f"Duplicate parameter found in schema `{self.defs_notation}`: `{d_param_id}`")
                 param_keys.append(d_param_id)
                 num_params += 1
 
         # Check that everything in allOf exists
         for allOf in schema.get("allOf", []):
-            if "definitions" not in schema:
-                raise AssertionError("Schema has allOf, but no definitions")
-            def_key = allOf["$ref"][14:]
-            if def_key not in schema.get("definitions", {}):
-                raise AssertionError(f"Subschema `{def_key}` found in `allOf` but not `definitions`")
+            _, allof_defs_notation, def_key = allOf["$ref"].split("/")  # $ref looks like "#/<defs notation>/<group>"
+            if allof_defs_notation not in schema:
+                raise AssertionError(f"Schema has allOf, but no {allof_defs_notation}")
+            if def_key not in schema.get(allof_defs_notation, {}):
+                raise AssertionError(f"Subschema `{def_key}` found in `allOf` but not `{allof_defs_notation}`")
 
         # Check that the schema describes at least one parameter
         if num_params == 0:
@@ -402,7 +496,7 @@ def validate_schema(self, schema=None):
     def validate_schema_title_description(self, schema=None):
         """
         Extra validation command for linting.
-        Checks that the schema "$id", "title" and "description" attributes match the piipeline config.
+        Checks that the schema "$id", "title" and "description" attributes match the pipeline config.
""" if schema is None: schema = self.schema @@ -410,12 +504,6 @@ def validate_schema_title_description(self, schema=None): log.debug("Pipeline schema not set - skipping validation of top-level attributes") return None - if "$schema" not in self.schema: - raise AssertionError("Schema missing top-level `$schema` attribute") - schema_attr = "http://json-schema.org/draft-07/schema" - if self.schema["$schema"] != schema_attr: - raise AssertionError(f"Schema `$schema` should be `{schema_attr}`\n Found `{self.schema['$schema']}`") - if self.pipeline_manifest == {}: self.get_wf_params() @@ -465,9 +553,9 @@ def check_for_input_mimetype(self): if "input" not in self.schema_params: raise LookupError("Parameter `input` not found in schema") # Check that the input parameter is defined in the right place - if "input" not in self.schema.get("definitions", {}).get("input_output_options", {}).get("properties", {}): + if "input" not in self.schema.get(self.defs_notation, {}).get("input_output_options", {}).get("properties", {}): raise LookupError("Parameter `input` is not defined in the correct subschema (input_output_options)") - input_entry = self.schema["definitions"]["input_output_options"]["properties"]["input"] + input_entry = self.schema[self.defs_notation]["input_output_options"]["properties"]["input"] if "mimetype" not in input_entry: return None mimetype = input_entry["mimetype"] @@ -519,7 +607,7 @@ def schema_to_markdown(self, columns): out = f"# {self.schema['title']}\n\n" out += f"{self.schema['description']}\n" # Grouped parameters - for definition in self.schema.get("definitions", {}).values(): + for definition in self.schema.get(self.defs_notation, {}).values(): out += f"\n## {definition.get('title', {})}\n\n" out += f"{definition.get('description', '')}\n\n" required = definition.get("required", []) @@ -701,15 +789,15 @@ def remove_schema_empty_definitions(self): """ # Identify and remove empty definitions from the schema empty_definitions = [] - for d_key, d_schema in list(self.schema.get("definitions", {}).items()): + for d_key, d_schema in list(self.schema.get(self.defs_notation, {}).items()): if not d_schema.get("properties"): - del self.schema["definitions"][d_key] + del self.schema[self.defs_notation][d_key] empty_definitions.append(d_key) log.warning(f"Removing empty group: '{d_key}'") # Remove "allOf" group with empty definitions from the schema for d_key in empty_definitions: - allOf = {"$ref": f"#/definitions/{d_key}"} + allOf = {"$ref": f"#/{self.defs_notation}/{d_key}"} if allOf in self.schema.get("allOf", []): self.schema["allOf"].remove(allOf) @@ -718,8 +806,8 @@ def remove_schema_empty_definitions(self): del self.schema["allOf"] # If we don't have anything left in "definitions", remove it - if self.schema.get("definitions") == {}: - del self.schema["definitions"] + if self.schema.get(self.defs_notation) == {}: + del self.schema[self.defs_notation] def remove_schema_notfound_configs(self): """ @@ -729,9 +817,9 @@ def remove_schema_notfound_configs(self): # Top-level properties self.schema, params_removed = self.remove_schema_notfound_configs_single_schema(self.schema) # Sub-schemas in definitions - for d_key, definition in self.schema.get("definitions", {}).items(): + for d_key, definition in self.schema.get(self.defs_notation, {}).items(): cleaned_schema, p_removed = self.remove_schema_notfound_configs_single_schema(definition) - self.schema["definitions"][d_key] = cleaned_schema + self.schema[self.defs_notation][d_key] = cleaned_schema params_removed.extend(p_removed) return 
params_removed
@@ -783,13 +871,12 @@ def add_schema_found_configs(self):
         Update defaults if they have changed
         """
         params_added = []
-        params_ignore = self.pipeline_params.get("validationSchemaIgnoreParams", "").strip("\"'").split(",")
-        params_ignore.append("validationSchemaIgnoreParams")
+
         for p_key, p_val in self.pipeline_params.items():
             s_key = self.schema_params.get(p_key)
             # Check if key is in schema parameters
             # Key is in pipeline but not in schema or ignored from schema
-            if p_key not in self.schema_params and p_key not in params_ignore:
+            if p_key not in self.schema_params and p_key not in self.ignored_params:
                 if (
                     self.no_prompts
                     or self.schema_from_scratch
@@ -822,7 +909,7 @@ def add_schema_found_configs(self):
             elif (
                 s_key
                 and (p_key not in self.schema_defaults)
-                and (p_key not in params_ignore)
+                and (p_key not in self.ignored_params)
                 and (p_def := self.build_schema_param(p_val).get("default"))
             ):
                 if self.no_prompts or Confirm.ask(
@@ -869,6 +956,7 @@ def launch_web_builder(self):
         """
         Send pipeline schema to web builder and wait for response
         """
+
         content = {
             "post_content": "json_schema",
             "api": "true",
@@ -877,6 +965,7 @@ def launch_web_builder(self):
             "schema": json.dumps(self.schema),
         }
         web_response = nf_core.utils.poll_nfcore_web_api(self.web_schema_build_url, content)
+
         try:
             if "api_url" not in web_response:
                 raise AssertionError('"api_url" not in web_response')
diff --git a/nf_core/pipelines/sync.py b/nf_core/pipelines/sync.py
index fced35dc20..12b29f15ec 100644
--- a/nf_core/pipelines/sync.py
+++ b/nf_core/pipelines/sync.py
@@ -21,6 +21,7 @@
 import nf_core.pipelines.create.create
 import nf_core.pipelines.list
 import nf_core.utils
+from nf_core.pipelines.lint_utils import dump_yaml_with_prettier
 
 log = logging.getLogger(__name__)
 
@@ -273,19 +274,24 @@ def make_template_pipeline(self):
             yaml.safe_dump(self.config_yml.model_dump(), config_path)
 
         try:
-            nf_core.pipelines.create.create.PipelineCreate(
+            pipeline_create_obj = nf_core.pipelines.create.create.PipelineCreate(
                 outdir=str(self.pipeline_dir),
                 from_config_file=True,
                 no_git=True,
                 force=True,
-            ).init_pipeline()
+            )
+            pipeline_create_obj.init_pipeline()
 
             # set force to false to avoid overwriting files in the future
             if self.config_yml.template is not None:
+                self.config_yml.template = pipeline_create_obj.config
                 # Set force to false in the template config to avoid overwriting existing files
                 self.config_yml.template.force = False
-                with open(self.config_yml_path, "w") as config_path:
-                    yaml.safe_dump(self.config_yml.model_dump(), config_path)
+                # Set outdir as the current directory to avoid local info leaking
+                self.config_yml.template.outdir = "."
+                # Update nf-core version
+                self.config_yml.nf_core_version = nf_core.__version__
+                dump_yaml_with_prettier(self.config_yml_path, self.config_yml.model_dump())
         except Exception as err:
             # Reset to where you were to prevent git getting messed up.
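The new `nf_core/ro_crate.py` module below drives the `rocrate` package (added to `requirements.txt` further down in this diff). For orientation, here is a minimal standalone sketch of the underlying API that the module builds on; the file name and metadata are invented for illustration and this is not nf-core code:

```python
from rocrate.rocrate import ROCrate

# Assemble a crate in memory, attach one workflow file, then serialise it.
crate = ROCrate()
crate.name = "Example crate"  # illustrative metadata only

# add_file() registers the file as a crate entity; it assumes a main.nf
# exists in the working directory when the crate is written.
wf_file = crate.add_file("main.nf")
wf_file.append_to("keywords", ["nf-core", "nextflow"])

crate.write("example-crate")  # directory with ro-crate-metadata.json plus data files
crate.write_zip("example.crate.zip")
```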
diff --git a/nf_core/ro_crate.py b/nf_core/ro_crate.py
new file mode 100644
index 0000000000..0c3b486ac2
--- /dev/null
+++ b/nf_core/ro_crate.py
@@ -0,0 +1,324 @@
+#!/usr/bin/env python
+"""Code to deal with pipeline RO (Research Object) Crates"""
+
+import logging
+import os
+import sys
+import tempfile
+from pathlib import Path
+from typing import Union
+
+import requests
+import rocrate.model.entity
+import rocrate.rocrate
+from git import GitCommandError, InvalidGitRepositoryError
+from rocrate.model.person import Person
+
+from nf_core.utils import Pipeline
+
+log = logging.getLogger(__name__)
+
+
+class ROCrate:
+    """
+    Class to generate an RO Crate for a pipeline
+
+    Args:
+        pipeline_dir (Path): Path to the pipeline directory
+        version (str): Version of the pipeline to use
+
+    """
+
+    def __init__(self, pipeline_dir: Path, version=""):
+        from nf_core.utils import is_pipeline_directory, setup_requests_cachedir
+
+        is_pipeline_directory(pipeline_dir)
+        self.pipeline_dir = pipeline_dir
+        self.version = version
+        self.crate: rocrate.rocrate.ROCrate
+        self.pipeline_obj = Pipeline(str(self.pipeline_dir))
+        self.pipeline_obj._load()
+
+        setup_requests_cachedir()
+
+    def create_ro_crate(
+        self, outdir: Path, metadata_path: Union[None, Path] = None, zip_path: Union[None, Path] = None
+    ) -> None:
+        """
+        Create an RO Crate for a pipeline
+
+        Args:
+            outdir (Path): Path to the output directory
+            metadata_path (Path): Path to the metadata file
+            zip_path (Path): Path to the zip file
+
+        """
+
+        # Set input paths
+        try:
+            self.set_crate_paths(outdir)
+        except OSError as e:
+            log.error(e)
+            sys.exit(1)
+
+        # Change to the pipeline directory, because the RO Crate doesn't handle relative paths well
+        current_path = Path.cwd()
+        os.chdir(self.pipeline_dir)
+
+        # Check that the checked-out pipeline version is the same as the requested version
+        if self.version:
+            if self.version != self.pipeline_obj.nf_config.get("manifest.version"):
+                # using git checkout to get the requested version
+                log.info(f"Checking out pipeline version {self.version}")
+                try:
+                    self.pipeline_obj.repo.git.checkout(self.version)
+                    self.pipeline_obj = Pipeline(str(self.pipeline_dir))
+                    self.pipeline_obj._load()
+                except InvalidGitRepositoryError:
+                    log.error(f"Could not find a git repository in {self.pipeline_dir}")
+                    sys.exit(1)
+                except GitCommandError:
+                    log.error(f"Could not checkout version {self.version}")
+                    sys.exit(1)
+
+        self.make_workflow_ro_crate()
+
+        # Save just the JSON metadata file
+        if metadata_path is not None:
+            log.info(f"Saving metadata file '{metadata_path}'")
+            # Save the crate to a temporary directory
+            tmpdir = Path(tempfile.mkdtemp(), "wf")
+            self.crate.write(tmpdir)
+            # Now save just the JSON file
+            crate_json_fn = Path(tmpdir, "ro-crate-metadata.json")
+            if metadata_path.name == "ro-crate-metadata.json":
+                crate_json_fn.rename(metadata_path)
+            else:
+                crate_json_fn.rename(metadata_path / "ro-crate-metadata.json")
+
+        # Save the whole crate zip file
+        if zip_path is not None:
+            if zip_path.name == "ro-crate.crate.zip":
+                log.info(f"Saving zip file '{zip_path}'")
+                self.crate.write_zip(zip_path)
+            else:
+                log.info(f"Saving zip file '{zip_path}/ro-crate.crate.zip'")
+                self.crate.write_zip(zip_path / "ro-crate.crate.zip")
+
+        # Change back to the original directory
+        os.chdir(current_path)
+
+    def make_workflow_ro_crate(self) -> None:
+        """
+        Create an RO Crate for a pipeline
+        """
+        if self.pipeline_obj is None:
+            raise ValueError("Pipeline object not loaded")
+
+        # Create the RO Crate object
+        self.crate = rocrate.rocrate.ROCrate()
+
+        # Set language type
+        programming_language = rocrate.model.entity.Entity(
+            self.crate,
+            "#nextflow",
+            properties={
+                "@type": ["ComputerLanguage", "SoftwareApplication"],
+                "name": "Nextflow",
+                "url": "https://www.nextflow.io/",
+                "identifier": "https://www.nextflow.io/",
+                "version": self.pipeline_obj.nf_config.get("manifest.nextflowVersion", ""),
+            },
+        )
+        self.crate.add(programming_language)
+
+        # Conform to RO-Crate 1.1 and workflowhub-ro-crate
+        self.crate.update_jsonld(
+            {
+                "@id": "ro-crate-metadata.json",
+                "conformsTo": [
+                    {"@id": "https://w3id.org/ro/crate/1.1"},
+                    {"@id": "https://w3id.org/workflowhub/workflow-ro-crate/1.0"},
+                ],
+            }
+        )
+
+        # Set main entity file
+        self.set_main_entity("main.nf")
+
+        # add readme as description
+        readme = Path("README.md")
+
+        try:
+            self.crate.description = readme.read_text()
+        except FileNotFoundError:
+            log.error(f"Could not find README.md in {self.pipeline_dir}")
+        # get license from LICENSE file
+        license_file = Path("LICENSE")
+        try:
+            license = license_file.read_text()
+            if license.startswith("MIT"):
+                self.crate.license = "MIT"
+            else:
+                # prompt for license
+                log.info("Could not determine license from LICENSE file")
+                self.crate.license = input("Please enter the license for this pipeline: ")
+        except FileNotFoundError:
+            log.error(f"Could not find LICENSE file in {self.pipeline_dir}")
+
+        # Set the crate name
+        self.crate.name = f'Research Object Crate for {self.pipeline_obj.nf_config.get("manifest.name")}'
+
+        if "dev" in self.pipeline_obj.nf_config.get("manifest.version", ""):
+            self.crate.CreativeWorkStatus = "InProgress"
+        else:
+            self.crate.CreativeWorkStatus = "Stable"
+
+        # Add all other files
+        self.add_workflow_files()
+
+    def set_main_entity(self, main_entity_filename: str):
+        """
+        Set the main.nf as the main entity of the crate and add necessary metadata
+        """
+
+        wf_file = self.crate.add_jsonld(
+            {
+                "@id": main_entity_filename,
+                "@type": ["File", "SoftwareSourceCode", "ComputationalWorkflow"],
+            },
+        )
+        self.crate.mainEntity = wf_file
+        self.add_main_authors(wf_file)
+        wf_file.append_to("programmingLanguage", {"@id": "#nextflow"})
+        # get keywords from nf-core website
+        remote_workflows = requests.get("https://nf-co.re/pipelines.json").json()["remote_workflows"]
+        # go through all remote workflows and find the one that matches the pipeline name
+        topics = ["nf-core", "nextflow"]
+        for remote_wf in remote_workflows:
+            if remote_wf["name"] == self.pipeline_obj.pipeline_name.replace("nf-core/", ""):
+                topics = topics + remote_wf["topics"]
+                break
+
+        log.debug(f"Adding topics: {topics}")
+        wf_file.append_to("keywords", topics)
+
+    def add_main_authors(self, wf_file):
+        """
+        Add workflow authors to the crate
+        """
+        # add author entity to crate
+
+        try:
+            authors = self.pipeline_obj.nf_config["manifest.author"].split(",")
+            # remove spaces
+            authors = [a.strip() for a in authors]
+        except KeyError:
+            log.error("No author field found in manifest of nextflow.config")
+            return
+        # look at git contributors for author names
+        try:
+            contributors = set()
+
+            commits_touching_path = list(self.pipeline_obj.repo.iter_commits(paths="main.nf"))
+
+            for commit in commits_touching_path:
+                contributors.add(commit.author.name)
+            # exclude bots
+            contributors = [c for c in contributors if not c.endswith("bot") and c != "Travis CI User"]
+            # remove usernames (just keep names with spaces)
+            contributors = [c for c in contributors if " " in c]
+
+            log.debug(f"Found {len(contributors)} git authors")
+            for git_author in contributors:
+                if git_author not in authors:
+                    authors.append(git_author)
+        except AttributeError:
+            log.debug("Could not find git authors")
+
+        for author in authors:
+            log.debug(f"Adding author: {author}")
+            orcid = get_orcid(author)
+            author_entity = self.crate.add(Person(self.crate, orcid, properties={"name": author}))
+            wf_file.append_to("author", author_entity)
+
+    def add_workflow_files(self):
+        """
+        Add workflow files to the RO Crate
+        """
+        import nf_core.utils
+
+        wf_filenames = nf_core.utils.get_wf_files(Path.cwd())
+        # exclude github action files
+        wf_filenames = [fn for fn in wf_filenames if not fn.startswith(".github/")]
+        log.debug(f"Adding {len(wf_filenames)} workflow files")
+        for fn in wf_filenames:
+            # skip main.nf
+            if fn == "main.nf":
+                continue
+            # add nextflow language to .nf and .config files
+            if fn.endswith(".nf") or fn.endswith(".config") or fn.endswith(".nf.test"):
+                log.debug(f"Adding workflow file: {fn}")
+                self.crate.add_file(fn, properties={"programmingLanguage": {"@id": "#nextflow"}})
+                continue
+            if fn.endswith(".png"):
+                log.debug(f"Adding workflow image file: {fn}")
+                self.crate.add_jsonld({"@id": Path(fn).name, "@type": ["File", "ImageObject"]})
+                if "metro_map" in fn:
+                    log.info(f"Setting main entity image to: {fn}")
+                    self.crate.mainEntity.append_to("image", {"@id": Path(fn).name})
+                continue
+            if fn.endswith(".md"):
+                log.debug(f"Adding workflow file: {fn}")
+                self.crate.add_file(fn, properties={"encodingFormat": "text/markdown"})
+                continue
+            else:
+                log.debug(f"Adding workflow file: {fn}")
+                self.crate.add_file(fn)
+                continue
+
+    def set_crate_paths(self, path: Path) -> None:
+        """Given a pipeline name, directory, or path, set wf_crate_filename"""
+
+        path = Path(path)
+
+        if path.is_dir():
+            self.pipeline_dir = path
+            # wf_crate_filename = path / "ro-crate-metadata.json"
+        elif path.is_file():
+            self.pipeline_dir = path.parent
+            # wf_crate_filename = path
+
+        # Check that we could determine a pipeline directory
+        if self.pipeline_dir is None:
+            raise OSError(f"Could not find pipeline '{path}'")
+
+
+def get_orcid(name: str) -> Union[str, None]:
+    """
+    Get the ORCID for a given name
+
+    Args:
+        name (str): Name of the author
+
+    Returns:
+        str: ORCID URI or None
+    """
+    base_url = "https://pub.orcid.org/v3.0/search/"
+    headers = {
+        "Accept": "application/json",
+    }
+    params = {"q": f'family-name:"{name.split()[-1]}" AND given-names:"{name.split()[0]}"'}
+    response = requests.get(base_url, params=params, headers=headers)
+
+    if response.status_code == 200:
+        json_response = response.json()
+        if json_response.get("num-found") == 1:
+            orcid_uri = json_response.get("result")[0].get("orcid-identifier", {}).get("uri")
+            log.info(f"Using found ORCID for {name}. Please double-check: {orcid_uri}")
+            return orcid_uri
+        else:
+            log.debug(f"No exact ORCID found for {name}. See {response.url}")
+            return None
+    else:
+        log.info(f"API request to ORCID unsuccessful. Status code: {response.status_code}")
Status code: {response.status_code}") + return None diff --git a/nf_core/subworkflows/lint/__init__.py b/nf_core/subworkflows/lint/__init__.py index b366ddfb51..cedae62f11 100644 --- a/nf_core/subworkflows/lint/__init__.py +++ b/nf_core/subworkflows/lint/__init__.py @@ -11,11 +11,12 @@ import questionary import rich +import ruamel.yaml import nf_core.modules.modules_utils import nf_core.utils from nf_core.components.lint import ComponentLint, LintExceptionError, LintResult -from nf_core.pipelines.lint_utils import console +from nf_core.pipelines.lint_utils import console, run_prettier_on_file log = logging.getLogger(__name__) @@ -45,6 +46,7 @@ def __init__( self, directory, fail_warned=False, + fix=False, remote_url=None, branch=None, no_pull=False, @@ -55,6 +57,7 @@ def __init__( component_type="subworkflows", directory=directory, fail_warned=fail_warned, + fix=fix, remote_url=remote_url, branch=branch, no_pull=no_pull, @@ -214,6 +217,10 @@ def lint_subworkflow(self, swf, progress_bar, registry, local=False): # Otherwise run all the lint tests else: + # Update meta.yml file if requested + if self.fix: + self.update_meta_yml_file(swf) + if self.repo_type == "pipeline" and self.modules_json: # Set correct sha version = self.modules_json.get_subworkflow_version(swf.component_name, swf.repo_url, swf.org) @@ -230,3 +237,56 @@ def lint_subworkflow(self, swf, progress_bar, registry, local=False): self.failed += warned self.failed += [LintResult(swf, *s) for s in swf.failed] + + def update_meta_yml_file(self, swf): + """ + Update the meta.yml file with the correct inputs and outputs + """ + yaml = ruamel.yaml.YAML() + yaml.preserve_quotes = True + yaml.indent(mapping=2, sequence=2, offset=0) + + # Read meta.yml + with open(swf.meta_yml) as fh: + meta_yaml = yaml.load(fh) + meta_yaml_corrected = meta_yaml.copy() + # Obtain inputs and outputs from main.nf + swf.get_inputs_from_main_nf() + swf.get_outputs_from_main_nf() + + # Compare inputs and add them if missing + if "input" in meta_yaml: + # Delete inputs from meta.yml which are not present in main.nf + meta_yaml_corrected["input"] = [ + input for input in meta_yaml["input"] if list(input.keys())[0] in swf.inputs + ] + # Obtain inputs from main.nf missing in meta.yml + inputs_correct = [ + list(input.keys())[0] for input in meta_yaml_corrected["input"] if list(input.keys())[0] in swf.inputs + ] + inputs_missing = [input for input in swf.inputs if input not in inputs_correct] + # Add missing inputs to meta.yml + for missing_input in inputs_missing: + meta_yaml_corrected["input"].append({missing_input: {"description": ""}}) + + if "output" in meta_yaml: + # Delete outputs from meta.yml which are not present in main.nf + meta_yaml_corrected["output"] = [ + output for output in meta_yaml["output"] if list(output.keys())[0] in swf.outputs + ] + # Obtain output from main.nf missing in meta.yml + outputs_correct = [ + list(output.keys())[0] + for output in meta_yaml_corrected["output"] + if list(output.keys())[0] in swf.outputs + ] + outputs_missing = [output for output in swf.outputs if output not in outputs_correct] + # Add missing inputs to meta.yml + for missing_output in outputs_missing: + meta_yaml_corrected["output"].append({missing_output: {"description": ""}}) + + # Write corrected meta.yml to file + with open(swf.meta_yml, "w") as fh: + log.info(f"Updating {swf.meta_yml}") + yaml.dump(meta_yaml_corrected, fh) + run_prettier_on_file(fh.name) diff --git a/nf_core/subworkflows/lint/meta_yml.py b/nf_core/subworkflows/lint/meta_yml.py index 
24e75eddbf..be282bc453 100644 --- a/nf_core/subworkflows/lint/meta_yml.py +++ b/nf_core/subworkflows/lint/meta_yml.py @@ -1,4 +1,5 @@ import json +import logging from pathlib import Path import jsonschema.validators @@ -6,6 +7,8 @@ import nf_core.components.components_utils +log = logging.getLogger(__name__) + def meta_yml(subworkflow_lint_object, subworkflow): """ @@ -65,6 +68,8 @@ def meta_yml(subworkflow_lint_object, subworkflow): subworkflow.passed.append(("meta_input", f"`{input}` specified", subworkflow.meta_yml)) else: subworkflow.failed.append(("meta_input", f"`{input}` missing in `meta.yml`", subworkflow.meta_yml)) + else: + log.debug(f"No inputs specified in subworkflow `main.nf`: {subworkflow.component_name}") if "output" in meta_yaml: meta_output = [list(x.keys())[0] for x in meta_yaml["output"]] @@ -75,6 +80,8 @@ def meta_yml(subworkflow_lint_object, subworkflow): subworkflow.failed.append( ("meta_output", f"`{output}` missing in `meta.yml`", subworkflow.meta_yml) ) + else: + log.debug(f"No outputs specified in subworkflow `main.nf`: {subworkflow.component_name}") # confirm that the name matches the process name in main.nf if meta_yaml["name"].upper() == subworkflow.workflow_name: diff --git a/nf_core/utils.py b/nf_core/utils.py index 663efb6b46..c9e9afb262 100644 --- a/nf_core/utils.py +++ b/nf_core/utils.py @@ -5,6 +5,7 @@ import concurrent.futures import datetime import errno +import fnmatch import hashlib import io import json @@ -19,7 +20,7 @@ import time from contextlib import contextmanager from pathlib import Path -from typing import Any, Callable, Dict, Generator, List, Optional, Tuple, Union +from typing import TYPE_CHECKING, Any, Callable, Dict, Generator, List, Optional, Tuple, Union import git import prompt_toolkit.styles @@ -36,6 +37,9 @@ import nf_core +if TYPE_CHECKING: + from nf_core.pipelines.schema import PipelineSchema + log = logging.getLogger(__name__) # ASCII nf-core logo @@ -52,14 +56,29 @@ [ ("qmark", "fg:ansiblue bold"), # token in front of the question ("question", "bold"), # question text - ("answer", "fg:ansigreen nobold bg:"), # submitted answer text behind the question - ("pointer", "fg:ansiyellow bold"), # pointer used in select and checkbox prompts - ("highlighted", "fg:ansiblue bold"), # pointed-at choice in select and checkbox prompts - ("selected", "fg:ansiyellow noreverse bold"), # style for a selected item of a checkbox + ( + "answer", + "fg:ansigreen nobold bg:", + ), # submitted answer text behind the question + ( + "pointer", + "fg:ansiyellow bold", + ), # pointer used in select and checkbox prompts + ( + "highlighted", + "fg:ansiblue bold", + ), # pointed-at choice in select and checkbox prompts + ( + "selected", + "fg:ansiyellow noreverse bold", + ), # style for a selected item of a checkbox ("separator", "fg:ansiblack"), # separator in lists ("instruction", ""), # user instructions for select, rawselect, checkbox ("text", ""), # plain text - ("disabled", "fg:gray italic"), # disabled choices for select and checkbox prompts + ( + "disabled", + "fg:gray italic", + ), # disabled choices for select and checkbox prompts ("choice-default", "fg:ansiblack"), ("choice-default-changed", "fg:ansiyellow"), ("choice-required", "fg:ansired"), @@ -79,7 +98,11 @@ def fetch_remote_version(source_url): return remote_version -def check_if_outdated(current_version=None, remote_version=None, source_url="https://nf-co.re/tools_version"): +def check_if_outdated( + current_version=None, + remote_version=None, + source_url="https://nf-co.re/tools_version", 
+):
     """
     Check if the current version of nf-core is outdated
     """
@@ -146,11 +169,12 @@ def __init__(self, wf_path: Path) -> None:
         self.wf_path = Path(wf_path)
         self.pipeline_name: Optional[str] = None
         self.pipeline_prefix: Optional[str] = None
-        self.schema_obj: Optional[Dict] = None
+        self.schema_obj: Optional[PipelineSchema] = None
+        self.repo: Optional[git.Repo] = None
 
         try:
-            repo = git.Repo(self.wf_path)
-            self.git_sha = repo.head.object.hexsha
+            self.repo = git.Repo(self.wf_path)
+            self.git_sha = self.repo.head.object.hexsha
         except Exception as e:
             log.debug(f"Could not find git hash for pipeline: {self.wf_path}. {e}")
@@ -254,7 +278,7 @@ def fetch_wf_config(wf_path: Path, cache_config: bool = True) -> dict:
     """
     log.debug(f"Got '{wf_path}' as path")
-
+    wf_path = Path(wf_path)
     config = {}
     cache_fn = None
     cache_basedir = None
@@ -323,7 +347,7 @@ def fetch_wf_config(wf_path: Path, cache_config: bool = True) -> dict:
 
     # If we can, save a cached copy
    # HINT: during testing phase (in test_download, for example) we don't want
-    # to save configuration copy in $HOME, otherwise the tests/test_download.py::DownloadTest::test_wf_use_local_configs
+    # to save a configuration copy in $HOME, otherwise the tests/pipelines/test_download.py::DownloadTest::test_wf_use_local_configs
     # will fail after the first attempt. It's better not to save temporary data
    # in folders other than tmp when doing tests in general
     if cache_path and cache_config:
@@ -441,6 +465,7 @@ def poll_nfcore_web_api(api_url: str, post_data: Optional[Dict] = None) -> Dict:
         if post_data is None:
             response = requests.get(api_url, headers={"Cache-Control": "no-cache"})
         else:
+            log.debug(f"requesting {api_url} with {post_data}")
             response = requests.post(url=api_url, data=post_data)
     except requests.exceptions.Timeout:
         raise AssertionError(f"URL timed out: {api_url}")
@@ -526,7 +551,8 @@ def __call__(self, r):
                 with open(gh_cli_config_fn) as fh:
                     gh_cli_config = yaml.safe_load(fh)
                     self.auth = requests.auth.HTTPBasicAuth(
-                        gh_cli_config["github.com"]["user"], gh_cli_config["github.com"]["oauth_token"]
+                        gh_cli_config["github.com"]["user"],
+                        gh_cli_config["github.com"]["oauth_token"],
                     )
                     self.auth_mode = f"gh CLI config: {gh_cli_config['github.com']['user']}"
             except Exception:
@@ -794,12 +820,18 @@ def get_tag_date(tag_date):
             # Obtain version and build
             match = re.search(r"(?::)+([A-Za-z\d\-_.]+)", img["image_name"])
             if match is not None:
-                all_docker[match.group(1)] = {"date": get_tag_date(img["updated"]), "image": img}
+                all_docker[match.group(1)] = {
+                    "date": get_tag_date(img["updated"]),
+                    "image": img,
+                }
         elif img["image_type"] == "Singularity":
             # Obtain version and build
             match = re.search(r"(?::)+([A-Za-z\d\-_.]+)", img["image_name"])
             if match is not None:
-                all_singularity[match.group(1)] = {"date": get_tag_date(img["updated"]), "image": img}
+                all_singularity[match.group(1)] = {
+                    "date": get_tag_date(img["updated"]),
+                    "image": img,
+                }
     # Obtain common builds from Docker and Singularity images
     common_keys = list(all_docker.keys() & all_singularity.keys())
     current_date = None
@@ -929,13 +961,19 @@ def prompt_pipeline_release_branch(
     # Releases
     if len(wf_releases) > 0:
         for tag in map(lambda release: release.get("tag_name"), wf_releases):
-            tag_display = [("fg:ansiblue", f"{tag} "), ("class:choice-default", "[release]")]
+            tag_display = [
+                ("fg:ansiblue", f"{tag} "),
+                ("class:choice-default", "[release]"),
+            ]
             choices.append(questionary.Choice(title=tag_display, value=tag))
             tag_set.append(str(tag))
 
     # Branches
     for branch in 
wf_branches.keys(): - branch_display = [("fg:ansiyellow", f"{branch} "), ("class:choice-default", "[branch]")] + branch_display = [ + ("fg:ansiyellow", f"{branch} "), + ("class:choice-default", "[branch]"), + ] choices.append(questionary.Choice(title=branch_display, value=branch)) tag_set.append(branch) @@ -966,7 +1004,8 @@ def validate(self, value): return True else: raise questionary.ValidationError( - message="Invalid remote cache index file", cursor_position=len(value.text) + message="Invalid remote cache index file", + cursor_position=len(value.text), ) else: return True @@ -996,7 +1035,13 @@ def get_repo_releases_branches(pipeline, wfs): pipeline = wf.full_name # Store releases and stop loop - wf_releases = list(sorted(wf.releases, key=lambda k: k.get("published_at_timestamp", 0), reverse=True)) + wf_releases = list( + sorted( + wf.releases, + key=lambda k: k.get("published_at_timestamp", 0), + reverse=True, + ) + ) break # Arbitrary GitHub repo @@ -1016,7 +1061,13 @@ def get_repo_releases_branches(pipeline, wfs): raise AssertionError(f"Not able to find pipeline '{pipeline}'") except AttributeError: # Success! We have a list, which doesn't work with .get() which is looking for a dict key - wf_releases = list(sorted(rel_r.json(), key=lambda k: k.get("published_at_timestamp", 0), reverse=True)) + wf_releases = list( + sorted( + rel_r.json(), + key=lambda k: k.get("published_at_timestamp", 0), + reverse=True, + ) + ) # Get release tag commit hashes if len(wf_releases) > 0: @@ -1050,15 +1101,26 @@ def get_repo_releases_branches(pipeline, wfs): class NFCoreTemplateConfig(BaseModel): + """Template configuration schema""" + org: Optional[str] = None + """ Organisation name """ name: Optional[str] = None + """ Pipeline name """ description: Optional[str] = None + """ Pipeline description """ author: Optional[str] = None + """ Pipeline author """ version: Optional[str] = None + """ Pipeline version """ force: Optional[bool] = True + """ Force overwrite of existing files """ outdir: Optional[Union[str, Path]] = None + """ Output directory """ skip_features: Optional[list] = None + """ Skip features. See https://nf-co.re/docs/nf-core-tools/pipelines/create for a list of features. """ is_nfcore: Optional[bool] = None + """ Whether the pipeline is an nf-core pipeline. """ # convert outdir to str @field_validator("outdir") @@ -1081,13 +1143,22 @@ def get(self, item: str, default: Any = None) -> Any: class NFCoreYamlConfig(BaseModel): + """.nf-core.yml configuration file schema""" + repository_type: str + """ Type of repository: pipeline or modules """ nf_core_version: Optional[str] = None + """ Version of nf-core/tools used to create/update the pipeline""" org_path: Optional[str] = None + """ Path to the organisation's modules repository (used for modules repo_type only) """ lint: Optional[LintConfigType] = None + """ Pipeline linting configuration, see https://nf-co.re/docs/nf-core-tools/pipelines/lint#linting-config for examples and documentation """ template: Optional[NFCoreTemplateConfig] = None + """ Pipeline template configuration """ bump_version: Optional[Dict[str, bool]] = None + """ Disable bumping of the version for a module/subworkflow (when repository_type is modules). See https://nf-co.re/docs/nf-core-tools/modules/bump-versions for more information.""" update: Optional[Dict[str, Union[str, bool, Dict[str, Union[str, Dict[str, Union[str, bool]]]]]]] = None + """ Disable updating specific modules/subworkflows (when repository_type is pipeline). 
See https://nf-co.re/docs/nf-core-tools/modules/update for more information."""
 
     def __getitem__(self, item: str) -> Any:
         return getattr(self, item)
@@ -1136,6 +1207,35 @@ def load_tools_config(directory: Union[str, Path] = ".") -> Tuple[Optional[Path]
             error_message += f"\n{error['loc'][0]}: {error['msg']}"
         raise AssertionError(error_message)
 
+    wf_config = fetch_wf_config(Path(directory))
+    if nf_core_yaml_config["repository_type"] == "pipeline" and wf_config:
+        # Retrieve information if template from config file is empty
+        template = tools_config.get("template")
+        config_template_keys = template.keys() if template is not None else []
+        if nf_core_yaml_config.template is None:
+            # The .nf-core.yml file did not contain template information
+            nf_core_yaml_config.template = NFCoreTemplateConfig(
+                org="nf-core",
+                name=wf_config["manifest.name"].strip("'\"").split("/")[-1],
+                description=wf_config["manifest.description"].strip("'\""),
+                author=wf_config["manifest.author"].strip("'\""),
+                version=wf_config["manifest.version"].strip("'\""),
+                outdir=str(directory),
+                is_nfcore=True,
+            )
+        elif "prefix" in config_template_keys or "skip" in config_template_keys:
+            # The .nf-core.yml file contained the old prefix or skip keys
+            nf_core_yaml_config.template = NFCoreTemplateConfig(
+                org=tools_config["template"].get("prefix", tools_config["template"].get("org", "nf-core")),
+                name=tools_config["template"].get("name", wf_config["manifest.name"].strip("'\"").split("/")[-1]),
+                description=tools_config["template"].get("description", wf_config["manifest.description"].strip("'\"")),
+                author=tools_config["template"].get("author", wf_config["manifest.author"].strip("'\"")),
+                version=tools_config["template"].get("version", wf_config["manifest.version"].strip("'\"")),
+                outdir=tools_config["template"].get("outdir", str(directory)),
+                skip_features=tools_config["template"].get("skip", tools_config["template"].get("skip_features")),
+                is_nfcore=tools_config["template"].get("prefix", tools_config["template"].get("org")) == "nf-core",
+            )
+
     log.debug("Using config file: %s", config_fn)
     return config_fn, nf_core_yaml_config
@@ -1158,7 +1258,7 @@ def get_first_available_path(directory: Union[Path, str], paths: List[str]) -> U
     return None
 
 
-def sort_dictionary(d):
+def sort_dictionary(d: Dict) -> Dict:
     """Sorts a nested dictionary recursively"""
     result = {}
     for k, v in sorted(d.items()):
@@ -1299,3 +1399,21 @@ def set_wd(path: Path) -> Generator[None, None, None]:
         yield
     finally:
         os.chdir(start_wd)
+
+
+def get_wf_files(wf_path: Path):
+    """Return a list of all files in a directory (ignores files matched by .gitignore)"""
+
+    wf_files = []
+
+    with open(Path(wf_path, ".gitignore")) as f:
+        lines = f.read().splitlines()
+        ignore = [line for line in lines if line and not line.startswith("#")]
+
+    for path in Path(wf_path).rglob("*"):
+        if any(fnmatch.fnmatch(str(path), pattern) for pattern in ignore):
+            continue
+        if path.is_file():
+            wf_files.append(str(path))
+
+    return wf_files
diff --git a/requirements-dev.txt b/requirements-dev.txt
index aa43ee3fe3..aab9b1e5d7 100644
--- a/requirements-dev.txt
+++ b/requirements-dev.txt
@@ -6,7 +6,7 @@ responses
 ruff
 Sphinx
 sphinx-rtd-theme
-textual-dev==1.5.1
+textual-dev==1.6.1
 types-PyYAML
 types-requests
 types-jsonschema
@@ -16,7 +16,7 @@ types-requests
 types-setuptools
 typing_extensions >=4.0.0
 pytest-asyncio
-pytest-textual-snapshot==0.4.0
+pytest-textual-snapshot==1.0.0
 pytest-workflow>=2.0.0
 pytest>=8.0.0
 ruff
diff --git a/requirements.txt b/requirements.txt
index 
fb658be2fb..de368c55f0 100644 --- a/requirements.txt +++ b/requirements.txt @@ -3,7 +3,7 @@ filetype GitPython PyGithub jinja2 -jsonschema>=3.0 +jsonschema>=4.0 markdown>=3.3 packaging pillow @@ -18,7 +18,10 @@ requests requests_cache rich-click==1.8.* rich>=13.3.1 +rocrate +repo2rocrate tabulate textual==0.71.0 trogon pdiff +ruamel.yaml diff --git a/setup.py b/setup.py index 45df29b8bc..11b3022494 100644 --- a/setup.py +++ b/setup.py @@ -2,7 +2,7 @@ from setuptools import find_packages, setup -version = "3.0.0dev" +version = "3.0.3dev" with open("README.md") as f: readme = f.read() diff --git a/tests/modules/test_lint.py b/tests/modules/test_lint.py index cc7e0565e0..5372807987 100644 --- a/tests/modules/test_lint.py +++ b/tests/modules/test_lint.py @@ -186,6 +186,15 @@ def test_modules_lint_trimgalore(self): assert len(module_lint.passed) > 0 assert len(module_lint.warned) >= 0 + def test_modules_lint_tabix_tabix(self): + """Test linting the tabix/tabix module""" + self.mods_install.install("tabix/tabix") + module_lint = nf_core.modules.lint.ModuleLint(directory=self.pipeline_dir) + module_lint.lint(print_results=False, module="tabix/tabix") + assert len(module_lint.failed) == 0, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" + assert len(module_lint.passed) > 0 + assert len(module_lint.warned) >= 0 + def test_modules_lint_empty(self): """Test linting a pipeline with no modules installed""" self.mods_remove.remove("fastqc", force=True) @@ -265,7 +274,7 @@ def test_modules_lint_patched_modules(self): all_modules=True, ) - assert len(module_lint.failed) == 1 + assert len(module_lint.failed) == 1, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" assert len(module_lint.passed) > 0 assert len(module_lint.warned) >= 0 @@ -296,6 +305,14 @@ def test_modules_lint_check_url(self): len(mocked_ModuleLint.failed) == failed ), f"{test}: Expected {failed} FAIL, got {len(mocked_ModuleLint.failed)}." 
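As an aside, the `jsonschema>=4.0` floor above is what provides `Draft202012Validator` for the `validate_schema()` dispatch earlier in this diff. A minimal, self-contained sketch of that dispatch, using an invented example schema rather than one from the template:

```python
import jsonschema

# Map each supported `$schema` URI to the validator class that checks it,
# mirroring the two drafts handled in nf_core/pipelines/schema.py.
VALIDATORS = {
    "https://json-schema.org/draft-07/schema": jsonschema.Draft7Validator,
    "https://json-schema.org/draft/2020-12/schema": jsonschema.Draft202012Validator,
}

def check_schema_draft(schema: dict) -> None:
    """Validate a schema against the draft named in its `$schema` key."""
    try:
        validator = VALIDATORS[schema["$schema"]]
    except KeyError as e:
        raise AssertionError(f"Unsupported or missing `$schema`: {e}") from e
    # Raises jsonschema.exceptions.SchemaError if the schema itself is invalid
    validator.check_schema(schema)

# Invented example: draft 2020-12 schemas group parameters under "$defs",
# where draft-07 schemas use "definitions" for the same structure.
example = {
    "$schema": "https://json-schema.org/draft/2020-12/schema",
    "$defs": {"input_output_options": {"properties": {"input": {"type": "string"}}}},
    "allOf": [{"$ref": "#/$defs/input_output_options"}],
}
check_schema_draft(example)  # passes silently
```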
+ def test_modules_lint_update_meta_yml(self): + """update the meta.yml of a module""" + module_lint = nf_core.modules.lint.ModuleLint(directory=self.nfcore_modules, fix=True) + module_lint.lint(print_results=False, module="bpipe/test") + assert len(module_lint.failed) == 0, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" + assert len(module_lint.passed) > 0 + assert len(module_lint.warned) >= 0 + def test_modules_lint_snapshot_file(self): """Test linting a module with a snapshot file""" module_lint = nf_core.modules.lint.ModuleLint(directory=self.nfcore_modules) @@ -423,7 +440,7 @@ def test_modules_environment_yml_file_sorted_incorrectly(self): ) as fh: yaml_content = yaml.safe_load(fh) # Add a new dependency to the environment.yml file and reverse the order - yaml_content["dependencies"].append("z") + yaml_content["dependencies"].append("z=0.0.0") yaml_content["dependencies"].reverse() yaml_content = yaml.dump(yaml_content) with open( @@ -478,54 +495,6 @@ def test_modules_environment_yml_file_not_array(self): assert len(module_lint.warned) >= 0 assert module_lint.failed[0].lint_test == "environment_yml_valid" - def test_modules_environment_yml_file_name_mismatch(self): - """Test linting a module with a different name in the environment.yml file""" - with open( - Path( - self.nfcore_modules, - "modules", - "nf-core", - "bpipe", - "test", - "environment.yml", - ) - ) as fh: - yaml_content = yaml.safe_load(fh) - yaml_content["name"] = "bpipe-test" - with open( - Path( - self.nfcore_modules, - "modules", - "nf-core", - "bpipe", - "test", - "environment.yml", - ), - "w", - ) as fh: - fh.write(yaml.dump(yaml_content)) - module_lint = nf_core.modules.lint.ModuleLint(directory=self.nfcore_modules) - module_lint.lint(print_results=False, module="bpipe/test") - # reset changes - yaml_content["name"] = "bpipe_test" - with open( - Path( - self.nfcore_modules, - "modules", - "nf-core", - "bpipe", - "test", - "environment.yml", - ), - "w", - ) as fh: - fh.write(yaml.dump(yaml_content)) - - assert len(module_lint.failed) == 1, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" - assert len(module_lint.passed) > 0 - assert len(module_lint.warned) >= 0 - assert module_lint.failed[0].lint_test == "environment_yml_name" - def test_modules_meta_yml_incorrect_licence_field(self): """Test linting a module with an incorrect Licence field in meta.yml""" with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "meta.yml")) as fh: @@ -552,25 +521,6 @@ def test_modules_meta_yml_incorrect_licence_field(self): assert len(module_lint.warned) >= 0 assert module_lint.failed[0].lint_test == "meta_yml_valid" - def test_modules_meta_yml_input_mismatch(self): - """Test linting a module with an extra entry in input fields in meta.yml compared to module.input""" - with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "main.nf")) as fh: - main_nf = fh.read() - main_nf_new = main_nf.replace("path bam", "path bai") - with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "main.nf"), "w") as fh: - fh.write(main_nf_new) - module_lint = nf_core.modules.lint.ModuleLint(directory=self.nfcore_modules) - module_lint.lint(print_results=False, module="bpipe/test") - with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "main.nf"), "w") as fh: - fh.write(main_nf) - assert len(module_lint.failed) == 0, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" - assert len(module_lint.passed) >= 0 - assert 
len(module_lint.warned) == 2, f"Linting warning with {[x.__dict__ for x in module_lint.warned]}" - lint_tests = [x.lint_test for x in module_lint.warned] - # check that it is there twice: - assert lint_tests.count("meta_input_meta_only") == 1 - assert lint_tests.count("meta_input_main_only") == 1 - def test_modules_meta_yml_output_mismatch(self): """Test linting a module with an extra entry in output fields in meta.yml compared to module.output""" with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "main.nf")) as fh: @@ -582,49 +532,20 @@ def test_modules_meta_yml_output_mismatch(self): module_lint.lint(print_results=False, module="bpipe/test") with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "main.nf"), "w") as fh: fh.write(main_nf) - assert len(module_lint.failed) == 0, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" + assert len(module_lint.failed) == 1, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" assert len(module_lint.passed) >= 0 - assert len(module_lint.warned) == 2 - lint_tests = [x.lint_test for x in module_lint.warned] - # check that it is there twice: - assert lint_tests.count("meta_output_meta_only") == 1 - assert lint_tests.count("meta_output_main_only") == 1 + assert "Module `meta.yml` does not match `main.nf`" in module_lint.failed[0].message def test_modules_meta_yml_incorrect_name(self): """Test linting a module with an incorrect name in meta.yml""" with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "meta.yml")) as fh: meta_yml = yaml.safe_load(fh) meta_yml["name"] = "bpipe/test" - # need to make the same change to the environment.yml file - with open( - Path( - self.nfcore_modules, - "modules", - "nf-core", - "bpipe", - "test", - "environment.yml", - ) - ) as fh: - environment_yml = yaml.safe_load(fh) - environment_yml["name"] = "bpipe/test" with open( Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "meta.yml"), "w", ) as fh: fh.write(yaml.dump(meta_yml)) - with open( - Path( - self.nfcore_modules, - "modules", - "nf-core", - "bpipe", - "test", - "environment.yml", - ), - "w", - ) as fh: - fh.write(yaml.dump(environment_yml)) module_lint = nf_core.modules.lint.ModuleLint(directory=self.nfcore_modules) module_lint.lint(print_results=False, module="bpipe/test") @@ -635,19 +556,6 @@ def test_modules_meta_yml_incorrect_name(self): "w", ) as fh: fh.write(yaml.dump(meta_yml)) - environment_yml["name"] = "bpipe_test" - with open( - Path( - self.nfcore_modules, - "modules", - "nf-core", - "bpipe", - "test", - "environment.yml", - ), - "w", - ) as fh: - fh.write(yaml.dump(environment_yml)) assert len(module_lint.failed) == 1, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" assert len(module_lint.passed) >= 0 diff --git a/tests/modules/test_patch.py b/tests/modules/test_patch.py index c3eb94d374..2f60cd4a20 100644 --- a/tests/modules/test_patch.py +++ b/tests/modules/test_patch.py @@ -21,10 +21,10 @@ testing if the update commands works correctly with patch files """ -ORG_SHA = "002623ccc88a3b0cb302c7d8f13792a95354d9f2" -CORRECT_SHA = "1dff30bfca2d98eb7ac7b09269a15e822451d99f" -SUCCEED_SHA = "ba15c20c032c549d77c5773659f19c2927daf48e" -FAIL_SHA = "67b642d4471c4005220a342cad3818d5ba2b5a73" +ORG_SHA = "3dc7c14d29af40f1a0871a675364e437559d97a8" +CORRECT_SHA = "63e780200600e340365b669f9c673b670764c569" +SUCCEED_SHA = "0d0515c3f11266e1314e129bec3e308f804c8dc7" +FAIL_SHA = "cb64a5c1ef85619b89ab99dec2e9097fe84e1dc8" BISMARK_ALIGN = 
"bismark/align" REPO_NAME = "nf-core-test" PATCH_BRANCH = "patch-tester" @@ -76,7 +76,7 @@ def test_create_patch_no_change(self): module_path = Path(self.pipeline_dir, "modules", REPO_NAME, BISMARK_ALIGN) # Check that no patch file has been added to the directory - assert set(os.listdir(module_path)) == {"main.nf", "meta.yml"} + assert set(os.listdir(module_path)) == {"main.nf", "meta.yml", "environment.yml"} # Check the 'modules.json' contains no patch file for the module modules_json_obj = nf_core.modules.modules_json.ModulesJson(self.pipeline_dir) @@ -94,7 +94,7 @@ def test_create_patch_change(self): patch_fn = f"{'-'.join(BISMARK_ALIGN.split('/'))}.diff" # Check that a patch file with the correct name has been created - assert set(os.listdir(module_path)) == {"main.nf", "meta.yml", patch_fn} + assert set(os.listdir(module_path)) == {"main.nf", "meta.yml", "environment.yml", patch_fn} # Check the 'modules.json' contains a patch file for the module modules_json_obj = nf_core.modules.modules_json.ModulesJson(self.pipeline_dir) @@ -127,7 +127,7 @@ def test_create_patch_try_apply_successful(self): patch_fn = f"{'-'.join(BISMARK_ALIGN.split('/'))}.diff" # Check that a patch file with the correct name has been created - assert set(os.listdir(module_path)) == {"main.nf", "meta.yml", patch_fn} + assert set(os.listdir(module_path)) == {"main.nf", "meta.yml", "environment.yml", patch_fn} # Check the 'modules.json' contains a patch file for the module modules_json_obj = nf_core.modules.modules_json.ModulesJson(self.pipeline_dir) @@ -153,7 +153,7 @@ def test_create_patch_try_apply_successful(self): update_obj.move_files_from_tmp_dir(BISMARK_ALIGN, install_dir, REPO_NAME, SUCCEED_SHA) # Check that a patch file with the correct name has been created - assert set(os.listdir(module_path)) == {"main.nf", "meta.yml", patch_fn} + assert set(os.listdir(module_path)) == {"main.nf", "meta.yml", "environment.yml", patch_fn} # Check the 'modules.json' contains a patch file for the module modules_json_obj = nf_core.modules.modules_json.ModulesJson(self.pipeline_dir) @@ -195,7 +195,7 @@ def test_create_patch_try_apply_failed(self): patch_fn = f"{'-'.join(BISMARK_ALIGN.split('/'))}.diff" # Check that a patch file with the correct name has been created - assert set(os.listdir(module_path)) == {"main.nf", "meta.yml", patch_fn} + assert set(os.listdir(module_path)) == {"main.nf", "meta.yml", "environment.yml", patch_fn} # Check the 'modules.json' contains a patch file for the module modules_json_obj = nf_core.modules.modules_json.ModulesJson(self.pipeline_dir) @@ -234,7 +234,7 @@ def test_create_patch_update_success(self): patch_fn = f"{'-'.join(BISMARK_ALIGN.split('/'))}.diff" # Check that a patch file with the correct name has been created - assert set(os.listdir(module_path)) == {"main.nf", "meta.yml", patch_fn} + assert set(os.listdir(module_path)) == {"main.nf", "meta.yml", "environment.yml", patch_fn} # Check the 'modules.json' contains a patch file for the module modules_json_obj = nf_core.modules.modules_json.ModulesJson(self.pipeline_dir) @@ -254,7 +254,7 @@ def test_create_patch_update_success(self): assert update_obj.update(BISMARK_ALIGN) # Check that a patch file with the correct name has been created - assert set(os.listdir(module_path)) == {"main.nf", "meta.yml", patch_fn} + assert set(os.listdir(module_path)) == {"main.nf", "meta.yml", "environment.yml", patch_fn} # Check the 'modules.json' contains a patch file for the module modules_json_obj = 
nf_core.modules.modules_json.ModulesJson(self.pipeline_dir)
@@ -295,7 +295,7 @@ def test_create_patch_update_fail(self):
         patch_fn = f"{'-'.join(BISMARK_ALIGN.split('/'))}.diff"
 
         # Check that a patch file with the correct name has been created
-        assert set(os.listdir(module_path)) == {"main.nf", "meta.yml", patch_fn}
+        assert set(os.listdir(module_path)) == {"main.nf", "meta.yml", "environment.yml", patch_fn}
 
         # Check the 'modules.json' contains a patch file for the module
         modules_json_obj = nf_core.modules.modules_json.ModulesJson(self.pipeline_dir)
@@ -349,7 +349,7 @@ def test_remove_patch(self):
 
         # Check that a patch file with the correct name has been created
         patch_fn = f"{'-'.join(BISMARK_ALIGN.split('/'))}.diff"
-        assert set(os.listdir(module_path)) == {"main.nf", "meta.yml", patch_fn}
+        assert set(os.listdir(module_path)) == {"main.nf", "meta.yml", "environment.yml", patch_fn}
 
         # Check the 'modules.json' contains a patch file for the module
         modules_json_obj = nf_core.modules.modules_json.ModulesJson(self.pipeline_dir)
@@ -361,7 +361,7 @@ def test_remove_patch(self):
             mock_questionary.unsafe_ask.return_value = True
             patch_obj.remove(BISMARK_ALIGN)
         # Check that the diff file has been removed
-        assert set(os.listdir(module_path)) == {"main.nf", "meta.yml"}
+        assert set(os.listdir(module_path)) == {"main.nf", "meta.yml", "environment.yml"}
 
         # Check that the 'modules.json' entry has been removed
         modules_json_obj = nf_core.modules.modules_json.ModulesJson(self.pipeline_dir)
diff --git a/tests/pipelines/__snapshots__/test_create_app.ambr b/tests/pipelines/__snapshots__/test_create_app.ambr
deleted file mode 100644
index 08676899a1..0000000000
--- a/tests/pipelines/__snapshots__/test_create_app.ambr
+++ /dev/null
@@ -1,3321 +0,0 @@
-# serializer version: 1
-# [3,321 deleted lines of serialized Textual terminal snapshots omitted here: rendered "nf-core create" TUI screens, including test_basic_details_custom, test_basic_details_nfcore, test_choose_type, test_customisation_help, test_final_details and test_github_details]
Show  - β–Šβ–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–Žβ–Šβ–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–Žβ–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β– - β–ŠGitHub usernameβ–Žβ–Šβ€’β€’β€’β€’β€’β€’β€’β€’β€’β€’β€’β€’                  β–Ž - β–Šβ–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–Žβ–Šβ–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–Ž - - - - The name of the organisation where the The name of the new GitHub repository - GitHub repo will be cretaed - β–Šβ–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–Žβ–Šβ–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–Ž - β–Šnf-core                               β–Žβ–Šmypipeline                            β–Ž - β–Šβ–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–Žβ–Šβ–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–Ž - - - β–Œβš οΈ You can't create a repository directly in the nf-core organisation. - β–ŒPlease create the pipeline repo to an organisation where you have access or use your user  - β–Œaccount. A core-team member will be able to transfer the repo to nf-core once the development - β–Œhas started. - - β–ŒπŸ’‘ Your GitHub user account will be used by default if nf-core is given as the org name. - - - β–Šβ–”β–”β–”β–”β–”β–”β–”β–”β–ŽPrivate - β–Šβ–ŽSelect to make the new GitHub repo private. - β–Šβ–β–β–β–β–β–β–β–β–Ž - β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–” -  Back  Create GitHub repo  Finish without creating a repo  - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - - - - - - - - -  d Toggle dark mode  q Quit  - - - - - ''' -# --- -# name: test_github_exit_message - ''' - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - nf-core create - - - - - - - - - - ⭘nf-core create β€” Create a new pipeline with the nf-core pipeline template - - - HowTo create a GitHub repository - - - -                                           ,--./,-. -           ___     __   __   __   ___     /,-._.--~\  -     |\ | |__  __ /  ` /  \ |__) |__         }  { -     | \| |       \__, \__/ |  \ |___     \`-._,-`-, -                                           `._,._,' - -   If you would like to create the GitHub repository later, you can do it manually by following  -   these steps: - -  1. Create a new GitHub repository -  2. Add the remote to your local repository: - - - cd <pipeline_directory> - git remote add origin git@github.com:<username>/<repo_name>.git - - -  3. Push the code to the remote: - - - git push --all origin - - - β–ŒπŸ’‘ Note the --all flag: this is needed to push all branches to the remote. 
- - - - β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–” -  Close  - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - - - - - - - - - - - -  d Toggle dark mode  q Quit  - - - - - ''' -# --- -# name: test_github_question - ''' - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - nf-core create - - - - - - - - - - ⭘nf-core create β€” Create a new pipeline with the nf-core pipeline template - - - Create GitHub repository - - -   After creating the pipeline template locally, we can create a GitHub repository and push the  -   code to it. - -   Do you want to create a GitHub repository? - - - β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–” -  Create GitHub repo  Finish without creating a repo  - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -  d Toggle dark mode  q Quit  - - - - - ''' -# --- -# name: test_type_custom - ''' - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - nf-core create - - - - - - - - - - ⭘nf-core create β€” Create a new pipeline with the nf-core pipeline template - - - Template features - - - β–Šβ–”β–”β–”β–”β–”β–”β–”β–”β–Žβ–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–” - β–Šβ–Ž        Add Github CI testsThe pipeline will  Show help  - β–Šβ–β–β–β–β–β–β–β–β–Žinclude several GitHub▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - actions for Continuous - Integration (CI)  - testing - - β–Šβ–”β–”β–”β–”β–”β–”β–”β–”β–Žβ–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–” - β–Šβ–Ž        Use reference genomesThe pipeline will be  Show help  - β–Šβ–β–β–β–β–β–β–β–β–Žconfigured to use a β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β– - copy of the most  - common reference  - genome files from  - iGenomesβ–‡β–‡ - - β–Šβ–”β–”β–”β–”β–”β–”β–”β–”β–Žβ–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–” - β–Šβ–Ž        Add Github badgesThe README.md file of  Show help  - β–Šβ–β–β–β–β–β–β–β–β–Žthe pipeline will β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β– - include GitHub badges - - β–Šβ–”β–”β–”β–”β–”β–”β–”β–”β–Žβ–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–” - β–Šβ–Ž        Add configuration The pipeline will  Show help  - β–Šβ–β–β–β–β–β–β–β–β–Ž        filesinclude configuration β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β– - profiles containing  - custom parameters  - requried to run  - nf-core pipelines at  - different institutions - - β–Šβ–”β–”β–”β–”β–”β–”β–”β–”β–Žβ–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–” - β–Šβ–Ž        Use code lintersThe pipeline will  Show help  - β–Šβ–β–β–β–β–β–β–β–β–Žinclude code linters β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β– - and CI tests to lint  - your code: pre-commit, - editor-config and  - prettier. 
- - β–Šβ–”β–”β–”β–”β–”β–”β–”β–”β–Žβ–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–” - β–Šβ–Ž        Include citationsInclude pipeline tools Show help  - β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–” -  Back  Continue  - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - -  d Toggle dark mode  q Quit  - - - - - ''' -# --- -# name: test_type_nfcore - ''' - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - nf-core create - - - - - - - - - - ⭘nf-core create β€” Create a new pipeline with the nf-core pipeline template - - - Template features - - - β–Šβ–”β–”β–”β–”β–”β–”β–”β–”β–Žβ–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–” - β–Šβ–Ž        Use reference genomesThe pipeline will be  Show help  - β–Šβ–β–β–β–β–β–β–β–β–Žconfigured to use a β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β– - copy of the most common - reference genome files  - from iGenomes - - β–Šβ–”β–”β–”β–”β–”β–”β–”β–”β–Žβ–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–” - β–Šβ–Ž        Use multiqcThe pipeline will  Show help  - β–Šβ–β–β–β–β–β–β–β–β–Žinclude the MultiQC β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β– - module which generates  - an HTML report for  - quality control. - - β–Šβ–”β–”β–”β–”β–”β–”β–”β–”β–Žβ–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–” - β–Šβ–Ž        Use fastqcThe pipeline will  Show help  - β–Šβ–β–β–β–β–β–β–β–β–Žinclude the FastQC β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β– - module which performs  - quality control  - analysis of input FASTQ - files. - - - - - - - - - - - - - - - - - - - β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–” -  Back  Continue  - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - -  d Toggle dark mode  q Quit  - - - - - ''' -# --- -# name: test_type_nfcore_validation - ''' - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - nf-core create - - - - - - - - - - ⭘nf-core create β€” Create a new pipeline with the nf-core pipeline template - - - Basic details - - - - - GitHub organisationWorkflow name - - β–Šβ–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–Žβ–Šβ–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–Ž - β–Šnf-core                                   β–Žβ–ŠPipeline Nameβ–Ž - β–Šβ–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–Žβ–Šβ–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–Ž - Value error, Must be lowercase without  - punctuation. - - - - A short description of your pipeline. 
- - β–Šβ–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–Ž - β–ŠDescriptionβ–Ž - β–Šβ–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–Ž - Value error, Cannot be left empty. - - - - Name of the main author / authors - - β–Šβ–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–Ž - β–ŠAuthor(s)β–Ž - β–Šβ–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–β–Ž - Value error, Cannot be left empty. - - β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–” -  Back  Next  - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - - - - - - - - - - - -  d Toggle dark mode  q Quit  - - - - - ''' -# --- -# name: test_welcome - ''' - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - nf-core create - - - - - - - - - - ⭘nf-core create β€” Create a new pipeline with the nf-core pipeline template - -                                           ,--./,-. -           ___     __   __   __   ___     /,-._.--~\  -     |\ | |__  __ /  ` /  \ |__) |__         }  { -     | \| |       \__, \__/ |  \ |___     \`-._,-`-, -                                           `._,._,' - - - - Welcome to the nf-core pipeline creation wizard - -   This app will help you create a new Nextflow pipeline from the nf-core/tools pipeline template. - -   The template helps anyone benefit from nf-core best practices, and is a requirement for nf-core    -   pipelines. - - β–ŒπŸ’‘ If you want to add a pipeline to nf-core, please join on Slack and discuss your plans with - β–Œthe community as early as possible; ideally before you start on your pipeline! See the  - β–Œnf-core guidelines and the #new-pipelines Slack channel for more information. - - - β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–”β–” -  Let's go!  
- ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-  d Toggle dark mode  q Quit 
-
-
-
-
- '''
-# ---
diff --git a/tests/pipelines/__snapshots__/test_create_app/test_basic_details_custom.svg b/tests/pipelines/__snapshots__/test_create_app/test_basic_details_custom.svg
new file mode 100644
index 0000000000..f327dac799
--- /dev/null
+++ b/tests/pipelines/__snapshots__/test_create_app/test_basic_details_custom.svg
@@ -0,0 +1,271 @@
+[271 lines of SVG snapshot; markup lost in extraction. Rendered screen: "nf-core pipelines create" / "Basic details", with GitHub organisation, Workflow name, Description and Author(s) inputs, Back and Next buttons, and footer keys "d Toggle dark mode", "q Quit", "a Toggle all".]
diff --git a/tests/pipelines/__snapshots__/test_create_app/test_basic_details_nfcore.svg b/tests/pipelines/__snapshots__/test_create_app/test_basic_details_nfcore.svg
new file mode 100644
index 0000000000..6a4e424130
--- /dev/null
+++ b/tests/pipelines/__snapshots__/test_create_app/test_basic_details_nfcore.svg
@@ -0,0 +1,274 @@
+[274 lines of SVG snapshot; markup lost in extraction. Same "Basic details" screen with the GitHub organisation field fixed to "nf-core".]
diff --git a/tests/pipelines/__snapshots__/test_create_app/test_choose_type.svg b/tests/pipelines/__snapshots__/test_create_app/test_choose_type.svg
new file mode 100644
index 0000000000..3eaceeb477
--- /dev/null
+++ b/tests/pipelines/__snapshots__/test_create_app/test_choose_type.svg
@@ -0,0 +1,269 @@
+[269 lines of SVG snapshot; markup lost in extraction. Rendered screen: "Choose pipeline type", with "nf-core" and "Custom" buttons, guidance on when to choose each, and a "What's the difference?" list of the template features that "nf-core" pre-selects (GitHub Actions CI, Prettier formatting checks, @nf-core-bot auto-fix linting, stale-issue marking, shared nf-core configuration profiles).]
diff --git a/tests/pipelines/__snapshots__/test_create_app/test_customisation_help.svg b/tests/pipelines/__snapshots__/test_create_app/test_customisation_help.svg
new file mode 100644
index 0000000000..07ab592d27
--- /dev/null
+++ b/tests/pipelines/__snapshots__/test_create_app/test_customisation_help.svg
@@ -0,0 +1,275 @@
+[275 lines of SVG snapshot; markup lost in extraction. Rendered screen: "Template features", with a "Toggle all features" switch and the "Use reference genomes" help text expanded (iGenomes configuration file, automatic download of reference files for a given genome key).]
diff --git a/tests/pipelines/__snapshots__/test_create_app/test_final_details.svg b/tests/pipelines/__snapshots__/test_create_app/test_final_details.svg
new file mode 100644
index 0000000000..74c232f747
--- /dev/null
+++ b/tests/pipelines/__snapshots__/test_create_app/test_final_details.svg
@@ -0,0 +1,269 @@
+[269 lines of SVG snapshot; markup lost in extraction. Rendered screen: "Final details", with inputs for the first version of the pipeline ("1.0.0dev") and the output directory ("."), plus Back and Finish buttons.]
diff --git a/tests/pipelines/__snapshots__/test_create_app/test_github_details.svg b/tests/pipelines/__snapshots__/test_create_app/test_github_details.svg
new file mode 100644
index 0000000000..77a293fb79
--- /dev/null
+++ b/tests/pipelines/__snapshots__/test_create_app/test_github_details.svg
@@ -0,0 +1,276 @@
+[276 lines of SVG snapshot; markup lost in extraction. Rendered screen: "Create GitHub repository", with GitHub username and personal access token inputs, organisation ("nf-core") and repository name ("mypipeline") inputs, a warning that repositories cannot be created directly in the nf-core organisation, a "Private" toggle, and Back / Create GitHub repo / Finish without creating a repo buttons.]
diff --git a/tests/pipelines/__snapshots__/test_create_app/test_github_exit_message.svg b/tests/pipelines/__snapshots__/test_create_app/test_github_exit_message.svg
new file mode 100644
index 0000000000..1be8c63f10
--- /dev/null
+++ b/tests/pipelines/__snapshots__/test_create_app/test_github_exit_message.svg
@@ -0,0 +1,272 @@
+[272 lines of SVG snapshot; markup lost in extraction. Rendered screen: "HowTo create a GitHub repository" exit message, listing the manual steps (create a new GitHub repository, `git remote add origin git@github.com:<username>/<repo_name>.git`, `git push --all origin`) with a note on the --all flag and a Close button.]
diff --git a/tests/pipelines/__snapshots__/test_create_app/test_github_question.svg b/tests/pipelines/__snapshots__/test_create_app/test_github_question.svg
new file mode 100644
index 0000000000..8aad414e62
--- /dev/null
+++ b/tests/pipelines/__snapshots__/test_create_app/test_github_question.svg
@@ -0,0 +1,265 @@
+[265 lines of SVG snapshot; markup lost in extraction. Rendered screen: "Create GitHub repository" question ("Do you want to create a GitHub repository?") with "Create GitHub repo" and "Finish without creating a repo" buttons.]
diff --git a/tests/pipelines/__snapshots__/test_create_app/test_type_custom.svg b/tests/pipelines/__snapshots__/test_create_app/test_type_custom.svg
new file mode 100644
index 0000000000..cff0309159
--- /dev/null
+++ b/tests/pipelines/__snapshots__/test_create_app/test_type_custom.svg
@@ -0,0 +1,274 @@
+[274 lines of SVG snapshot; markup lost in extraction. Rendered screen: "Template features" for a custom pipeline, with toggles for a GitHub repository, GitHub CI tests, reference genomes, GitHub badges, configuration files and code linters, each with a Show help button, plus Back and Continue buttons.]
diff --git a/tests/pipelines/__snapshots__/test_create_app/test_type_nfcore.svg b/tests/pipelines/__snapshots__/test_create_app/test_type_nfcore.svg
new file mode 100644
index 0000000000..c18bb31b8e
--- /dev/null
+++ b/tests/pipelines/__snapshots__/test_create_app/test_type_nfcore.svg
@@ -0,0 +1,272 @@
+[272 lines of SVG snapshot; markup lost in extraction. Rendered screen: "Template features" for an nf-core pipeline, with toggles for reference genomes, MultiQC, FastQC and the nf-schema Nextflow plugin, plus Back and Continue buttons.]
diff --git a/tests/pipelines/__snapshots__/test_create_app/test_type_nfcore_validation.svg b/tests/pipelines/__snapshots__/test_create_app/test_type_nfcore_validation.svg
new file mode 100644
index 0000000000..fd6f2532c8
--- /dev/null
+++ b/tests/pipelines/__snapshots__/test_create_app/test_type_nfcore_validation.svg
@@ -0,0 +1,273 @@
+[273 lines of SVG snapshot; markup lost in extraction. Rendered screen: "Basic details" with validation errors shown: "Value error, Must be lowercase without punctuation." under Workflow name and "Value error, Cannot be left empty." under Description and Author(s).]
diff --git a/tests/pipelines/__snapshots__/test_create_app/test_welcome.svg b/tests/pipelines/__snapshots__/test_create_app/test_welcome.svg
new file mode 100644
index 0000000000..d9941b650d
--- /dev/null
+++ b/tests/pipelines/__snapshots__/test_create_app/test_welcome.svg
@@ -0,0 +1,271 @@
+[271 lines of SVG snapshot; markup lost in extraction. Rendered screen: welcome page of the pipeline creation wizard, with the nf-core ASCII-art logo, an introduction to the nf-core/tools pipeline template, a tip to discuss new nf-core pipelines on Slack early, and a "Let's go!" button.]
diff --git a/tests/pipelines/lint/test_files_exist.py b/tests/pipelines/lint/test_files_exist.py
index 85ba817536..97dd346cdf 100644
--- a/tests/pipelines/lint/test_files_exist.py
+++ b/tests/pipelines/lint/test_files_exist.py
@@ -54,27 +54,6 @@ def test_files_exist_pass(self):
         results = lint_obj.files_exist()
         assert results["failed"] == []
 
-    def test_files_exist_pass_conditional(self):
-        lint_obj = nf_core.pipelines.lint.PipelineLint(self.new_pipeline)
-        lint_obj._load()
-        lint_obj.nf_config["plugins"] = []
-        lib_dir = Path(self.new_pipeline, "lib")
-        lib_dir.mkdir()
-        (lib_dir / "nfcore_external_java_deps.jar").touch()
-        results = lint_obj.files_exist()
-        assert results["failed"] == []
-        assert results["ignored"] == []
-
-    def test_files_exist_fail_conditional(self):
-        lint_obj = nf_core.pipelines.lint.PipelineLint(self.new_pipeline)
-        lint_obj._load()
-        lib_dir = Path(self.new_pipeline, "lib")
-        lib_dir.mkdir()
-        (lib_dir / "nfcore_external_java_deps.jar").touch()
-        results = lint_obj.files_exist()
-        assert results["failed"] == ["File must be removed: `lib/nfcore_external_java_deps.jar`"]
-        assert results["ignored"] == []
-
     def test_files_exist_pass_conditional_nfschema(self):
         # replace nf-validation with nf-schema in nextflow.config
         with open(Path(self.new_pipeline, "nextflow.config")) as f:
diff --git a/tests/pipelines/lint/test_nextflow_config.py b/tests/pipelines/lint/test_nextflow_config.py
index 3cc9355452..a655fb8ace 100644
--- a/tests/pipelines/lint/test_nextflow_config.py
+++ b/tests/pipelines/lint/test_nextflow_config.py
@@ -30,7 +30,6 @@ def test_default_values_match(self):
         result = lint_obj.nextflow_config()
         assert len(result["failed"]) == 0
         assert len(result["warned"]) == 0
-        assert "Config default value correct: params.max_cpus" in str(result["passed"])
         assert "Config default value correct: params.validate_params" in str(result["passed"])
 
     def test_nextflow_config_bad_name_fail(self):
@@ -71,18 +70,18 @@ def test_nextflow_config_missing_test_profile_failed(self):
 
     def test_default_values_fail(self):
         """Test linting fails if the default values in nextflow.config do not match the ones defined in the nextflow_schema.json."""
-        # Change the default value of max_cpus in nextflow.config
+        # Change the default value of max_multiqc_email_size in nextflow.config
         nf_conf_file = Path(self.new_pipeline) / "nextflow.config"
         with open(nf_conf_file) as f:
             content = f.read()
-        fail_content = re.sub(r"\bmax_cpus\s*=\s*16\b", "max_cpus = 0", content)
+        fail_content = re.sub(r"\bmax_multiqc_email_size\s*=\s*'25.MB'", "max_multiqc_email_size = '0'", content)
         with open(nf_conf_file, "w") as f:
             f.write(fail_content)
-        # Change the default value of max_memory in nextflow_schema.json
+        # Change the default value of custom_config_version in nextflow_schema.json
         nf_schema_file = Path(self.new_pipeline) / "nextflow_schema.json"
         with open(nf_schema_file) as f:
             content = f.read()
-        fail_content = re.sub(r'"default": "128.GB"', '"default": "18.GB"', content)
+        fail_content = re.sub(r'"default": "master"', '"default": "main"', content)
         with open(nf_schema_file, "w") as f:
             f.write(fail_content)
         lint_obj = nf_core.pipelines.lint.PipelineLint(self.new_pipeline)
@@ -90,11 +89,11 @@ def test_default_values_fail(self):
         result = lint_obj.nextflow_config()
         assert len(result["failed"]) == 2
         assert (
-            "Config default value incorrect: `params.max_cpus` is set as `16` in `nextflow_schema.json` but is 
`0` in `nextflow.config`." + "Config default value incorrect: `params.max_multiqc_email_size` is set as `25.MB` in `nextflow_schema.json` but is `0` in `nextflow.config`." in result["failed"] ) assert ( - "Config default value incorrect: `params.max_memory` is set as `18.GB` in `nextflow_schema.json` but is `128.GB` in `nextflow.config`." + "Config default value incorrect: `params.custom_config_version` is set as `main` in `nextflow_schema.json` but is `master` in `nextflow.config`." in result["failed"] ) @@ -103,14 +102,14 @@ def test_catch_params_assignment_in_main_nf(self): # Add parameter assignment in main.nf main_nf_file = Path(self.new_pipeline) / "main.nf" with open(main_nf_file, "a") as f: - f.write("params.max_time = 42") + f.write("params.custom_config_base = 'test'") lint_obj = nf_core.pipelines.lint.PipelineLint(self.new_pipeline) lint_obj.load_pipeline_config() result = lint_obj.nextflow_config() - assert len(result["failed"]) == 1 + assert len(result["failed"]) == 2 assert ( - result["failed"][0] - == "Config default value incorrect: `params.max_time` is set as `240.h` in `nextflow_schema.json` but is `null` in `nextflow.config`." + result["failed"][1] + == "Config default value incorrect: `params.custom_config_base` is set as `https://raw.githubusercontent.com/nf-core/configs/master` in `nextflow_schema.json` but is `null` in `nextflow.config`." ) def test_allow_params_reference_in_main_nf(self): @@ -118,7 +117,7 @@ def test_allow_params_reference_in_main_nf(self): # Add parameter reference in main.nf main_nf_file = Path(self.new_pipeline) / "main.nf" with open(main_nf_file, "a") as f: - f.write("params.max_time == 42") + f.write("params.custom_config_version == 'main'") lint_obj = nf_core.pipelines.lint.PipelineLint(self.new_pipeline) lint_obj.load_pipeline_config() result = lint_obj.nextflow_config() @@ -126,10 +125,11 @@ def test_allow_params_reference_in_main_nf(self): def test_default_values_ignored(self): """Test ignoring linting of default values.""" - # Add max_cpus to the ignore list + # Add custom_config_version to the ignore list nf_core_yml_path = Path(self.new_pipeline) / ".nf-core.yml" nf_core_yml = NFCoreYamlConfig( - repository_type="pipeline", lint={"nextflow_config": [{"config_defaults": ["params.max_cpus"]}]} + repository_type="pipeline", + lint={"nextflow_config": [{"config_defaults": ["params.custom_config_version"]}]}, ) with open(nf_core_yml_path, "w") as f: yaml.dump(nf_core_yml.model_dump(), f) @@ -140,8 +140,8 @@ def test_default_values_ignored(self): result = lint_obj.nextflow_config() assert len(result["failed"]) == 0 assert len(result["ignored"]) == 1 - assert "Config default value correct: params.max_cpu" not in str(result["passed"]) - assert "Config default ignored: params.max_cpus" in str(result["ignored"]) + assert "Config default value correct: params.custom_config_version" not in str(result["passed"]) + assert "Config default ignored: params.custom_config_version" in str(result["ignored"]) def test_default_values_float(self): """Test comparing two float values.""" @@ -150,7 +150,9 @@ def test_default_values_float(self): with open(nf_conf_file) as f: content = f.read() fail_content = re.sub( - r"validate_params\s*=\s*true", "params.validate_params = true\ndummy = 0.000000001", content + r"validate_params\s*=\s*true", + "params.validate_params = true\ndummy = 0.000000001", + content, ) with open(nf_conf_file, "w") as f: f.write(fail_content) @@ -180,7 +182,9 @@ def test_default_values_float_fail(self): with open(nf_conf_file) as f: content = 
f.read() fail_content = re.sub( - r"validate_params\s*=\s*true", "params.validate_params = true\ndummy = 0.000000001", content + r"validate_params\s*=\s*true", + "params.validate_params = true\ndummy = 0.000000001", + content, ) with open(nf_conf_file, "w") as f: f.write(fail_content) diff --git a/tests/pipelines/lint/test_plugin_includes.py b/tests/pipelines/lint/test_plugin_includes.py new file mode 100644 index 0000000000..8eb31e2671 --- /dev/null +++ b/tests/pipelines/lint/test_plugin_includes.py @@ -0,0 +1,24 @@ +import nf_core.pipelines.lint + +from ..test_lint import TestLint + + +class TestLintPluginIncludes(TestLint): + def setUp(self) -> None: + super().setUp() + self.new_pipeline = self._make_pipeline_copy() + + def test_default_values_match(self): + lint_obj = nf_core.pipelines.lint.PipelineLint(self.new_pipeline) + result = lint_obj.plugin_includes() + assert len(result["failed"]) == 0 + assert len(result["warned"]) == 0 + + def test_wrong_include(self): + test_path = self.new_pipeline / "test.nf" + with open(test_path, "w") as of: + of.write("include { paramsSummary } from 'plugin/nf-validation'\n") + lint_obj = nf_core.pipelines.lint.PipelineLint(self.new_pipeline) + result = lint_obj.plugin_includes() + assert len(result["failed"]) == 1 + assert len(result["warned"]) == 0 diff --git a/tests/pipelines/test_bump_version.py b/tests/pipelines/test_bump_version.py index 709e82427d..8af5c0e4d1 100644 --- a/tests/pipelines/test_bump_version.py +++ b/tests/pipelines/test_bump_version.py @@ -13,12 +13,25 @@ def test_bump_pipeline_version(self): """Test that making a release with the working example files works""" # Bump the version number - nf_core.pipelines.bump_version.bump_pipeline_version(self.pipeline_obj, "1.1") + nf_core.pipelines.bump_version.bump_pipeline_version(self.pipeline_obj, "1.1.0") new_pipeline_obj = nf_core.utils.Pipeline(self.pipeline_dir) # Check nextflow.config new_pipeline_obj.load_pipeline_config() - assert new_pipeline_obj.nf_config["manifest.version"].strip("'\"") == "1.1" + assert new_pipeline_obj.nf_config["manifest.version"].strip("'\"") == "1.1.0" + + # Check multiqc_config.yml + with open(new_pipeline_obj._fp("assets/multiqc_config.yml")) as fh: + multiqc_config = yaml.safe_load(fh) + + assert "report_comment" in multiqc_config + assert "/releases/tag/1.1.0" in multiqc_config["report_comment"] + + # Check .nf-core.yml + with open(new_pipeline_obj._fp(".nf-core.yml")) as fh: + nf_core_yml = yaml.safe_load(fh) + if nf_core_yml["template"]: + assert nf_core_yml["template"]["version"] == "1.1.0" def test_dev_bump_pipeline_version(self): """Test that making a release works with a dev name and a leading v""" @@ -33,7 +46,7 @@ def test_dev_bump_pipeline_version(self): def test_bump_nextflow_version(self): # Bump the version number to a specific version, preferably one # we're not already on - version = "22.04.3" + version = "25.04.2" nf_core.pipelines.bump_version.bump_nextflow_version(self.pipeline_obj, version) new_pipeline_obj = nf_core.utils.Pipeline(self.pipeline_dir) new_pipeline_obj._load() diff --git a/tests/pipelines/test_create.py b/tests/pipelines/test_create.py index 13fd3b24cd..f83cc274fc 100644 --- a/tests/pipelines/test_create.py +++ b/tests/pipelines/test_create.py @@ -15,6 +15,7 @@ PIPELINE_TEMPLATE_YML = TEST_DATA_DIR / "pipeline_create_template.yml" PIPELINE_TEMPLATE_YML_SKIP = TEST_DATA_DIR / "pipeline_create_template_skip.yml" +PIPELINE_TEMPLATE = Path(nf_core.__file__).parent / "pipeline-template" class 
diff --git a/tests/pipelines/test_bump_version.py b/tests/pipelines/test_bump_version.py
index 709e82427d..8af5c0e4d1 100644
--- a/tests/pipelines/test_bump_version.py
+++ b/tests/pipelines/test_bump_version.py
@@ -13,12 +13,25 @@ def test_bump_pipeline_version(self):
         """Test that making a release with the working example files works"""
         # Bump the version number
-        nf_core.pipelines.bump_version.bump_pipeline_version(self.pipeline_obj, "1.1")
+        nf_core.pipelines.bump_version.bump_pipeline_version(self.pipeline_obj, "1.1.0")
         new_pipeline_obj = nf_core.utils.Pipeline(self.pipeline_dir)

         # Check nextflow.config
         new_pipeline_obj.load_pipeline_config()
-        assert new_pipeline_obj.nf_config["manifest.version"].strip("'\"") == "1.1"
+        assert new_pipeline_obj.nf_config["manifest.version"].strip("'\"") == "1.1.0"
+
+        # Check multiqc_config.yml
+        with open(new_pipeline_obj._fp("assets/multiqc_config.yml")) as fh:
+            multiqc_config = yaml.safe_load(fh)
+
+        assert "report_comment" in multiqc_config
+        assert "/releases/tag/1.1.0" in multiqc_config["report_comment"]
+
+        # Check .nf-core.yml
+        with open(new_pipeline_obj._fp(".nf-core.yml")) as fh:
+            nf_core_yml = yaml.safe_load(fh)
+        if nf_core_yml["template"]:
+            assert nf_core_yml["template"]["version"] == "1.1.0"

     def test_dev_bump_pipeline_version(self):
         """Test that making a release works with a dev name and a leading v"""
@@ -33,7 +46,7 @@ def test_bump_nextflow_version(self):
         # Bump the version number to a specific version, preferably one
         # we're not already on
-        version = "22.04.3"
+        version = "25.04.2"
         nf_core.pipelines.bump_version.bump_nextflow_version(self.pipeline_obj, version)
         new_pipeline_obj = nf_core.utils.Pipeline(self.pipeline_dir)
         new_pipeline_obj._load()
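The bump-version API exercised above can also be driven directly. A minimal sketch, with `path/to/pipeline` standing in for a real pipeline checkout:

```python
import nf_core.pipelines.bump_version
import nf_core.utils

# `path/to/pipeline` is a placeholder for an nf-core pipeline directory.
pipeline = nf_core.utils.Pipeline("path/to/pipeline")
pipeline._load()  # parse nextflow.config and friends, as the test fixtures do
nf_core.pipelines.bump_version.bump_pipeline_version(pipeline, "1.1.0")
```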
diff --git a/tests/pipelines/test_create.py b/tests/pipelines/test_create.py
index 13fd3b24cd..f83cc274fc 100644
--- a/tests/pipelines/test_create.py
+++ b/tests/pipelines/test_create.py
@@ -15,6 +15,7 @@
 PIPELINE_TEMPLATE_YML = TEST_DATA_DIR / "pipeline_create_template.yml"
 PIPELINE_TEMPLATE_YML_SKIP = TEST_DATA_DIR / "pipeline_create_template_skip.yml"
+PIPELINE_TEMPLATE = Path(nf_core.__file__).parent / "pipeline-template"


 class NfcoreCreateTest(unittest.TestCase):
@@ -134,3 +135,41 @@ def test_pipeline_creation_with_yml_skip(self, tmp_path):
         assert not (pipeline.outdir / ".github").exists()
         assert not (pipeline.outdir / "conf" / "igenomes.config").exists()
         assert not (pipeline.outdir / ".editorconfig").exists()
+
+    def test_template_customisation_all_files_grouping(self):
+        """Test that all pipeline template files are included in a pipeline customisation group."""
+        template_features_yml = load_features_yaml()
+        base_required_files = [
+            ".gitignore",
+            ".nf-core.yml",
+            "README.md",
+            "nextflow.config",
+            "CITATIONS.md",
+            "main.nf",
+            "workflows/pipeline.nf",
+        ]
+        all_skipped_files = []
+        for feature in template_features_yml.keys():
+            if template_features_yml[feature]["skippable_paths"]:
+                all_skipped_files.extend(template_features_yml[feature]["skippable_paths"])
+
+        for root, _, files in os.walk(PIPELINE_TEMPLATE):
+            for file in files:
+                str_path = str((Path(root) / file).relative_to(PIPELINE_TEMPLATE))
+                if str_path not in base_required_files:
+                    try:
+                        assert (
+                            str_path in all_skipped_files
+                        ), f"Template file `{str_path}` not present in a group for pipeline customisation in `template_features.yml`."
+                    except AssertionError:
+                        if "/" in str_path:
+                            # Check if the parent directory is in the skipped files
+                            upper_dir_present = False
+                            for i in range(1, len(str_path.split("/"))):
+                                upper_dir = "/".join(str_path.split("/")[:i])
+                                if upper_dir in all_skipped_files:
+                                    upper_dir_present = True
+                                    break
+                            assert upper_dir_present, f"Template file `{str_path}` not present in a group for pipeline customisation in `template_features.yml`."
+                        else:
+                            raise
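The walk in `test_template_customisation_all_files_grouping` keys off `template_features.yml`, where each feature lists the template paths it is allowed to strip. The shape the test relies on, sketched with illustrative feature names (the real names and paths live in the `template_features.yml` shipped inside the `nf_core` package):

```python
# Illustrative shape only; feature names here are placeholders.
template_features = {
    "igenomes": {"skippable_paths": ["conf/igenomes.config"]},
    "editorconfig": {"skippable_paths": [".editorconfig"]},
}
all_skipped_files = [
    path
    for feature in template_features.values()
    for path in feature["skippable_paths"]
]
```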
diff --git a/tests/pipelines/test_launch.py b/tests/pipelines/test_launch.py
index 7c6e3d6196..5e230528a7 100644
--- a/tests/pipelines/test_launch.py
+++ b/tests/pipelines/test_launch.py
@@ -47,7 +47,7 @@ def test_launch_file_exists_overwrite(self, mock_webbrowser, mock_lauch_web_gui,
     def test_get_pipeline_schema(self):
         """Test loading the params schema from a pipeline"""
         self.launcher.get_pipeline_schema()
-        assert len(self.launcher.schema_obj.schema["definitions"]["input_output_options"]["properties"]) > 2
+        assert len(self.launcher.schema_obj.schema["$defs"]["input_output_options"]["properties"]) > 2

     @with_temporary_folder
     def test_make_pipeline_schema(self, tmp_path):
@@ -60,8 +60,8 @@
         Path(test_pipeline_dir, "nextflow_schema.json").unlink()
         self.launcher = nf_core.pipelines.launch.Launch(test_pipeline_dir, params_out=self.nf_params_fn)
         self.launcher.get_pipeline_schema()
-        assert len(self.launcher.schema_obj.schema["definitions"]["input_output_options"]["properties"]) >= 2
-        assert self.launcher.schema_obj.schema["definitions"]["input_output_options"]["properties"]["outdir"] == {
+        assert len(self.launcher.schema_obj.schema["$defs"]["input_output_options"]["properties"]) >= 2
+        assert self.launcher.schema_obj.schema["$defs"]["input_output_options"]["properties"]["outdir"] == {
             "type": "string",
             "format": "directory-path",
             "description": "The output directory where the results will be saved. You have to use absolute paths to storage on Cloud infrastructure.",
@@ -91,8 +91,8 @@ def test_nf_merge_schema(self):
         self.launcher.get_pipeline_schema()
         self.launcher.set_schema_inputs()
         self.launcher.merge_nxf_flag_schema()
-        assert self.launcher.schema_obj.schema["allOf"][0] == {"$ref": "#/definitions/coreNextflow"}
-        assert "-resume" in self.launcher.schema_obj.schema["definitions"]["coreNextflow"]["properties"]
+        assert self.launcher.schema_obj.schema["allOf"][0] == {"$ref": "#/$defs/coreNextflow"}
+        assert "-resume" in self.launcher.schema_obj.schema["$defs"]["coreNextflow"]["properties"]

     def test_ob_to_questionary_string(self):
         """Check converting a python dict to a pyenquirer format - simple strings"""
@@ -101,7 +101,12 @@
             "default": "data/*{1,2}.fastq.gz",
         }
         result = self.launcher.single_param_to_questionary("input", sc_obj)
-        assert result == {"type": "input", "name": "input", "message": "", "default": "data/*{1,2}.fastq.gz"}
+        assert result == {
+            "type": "input",
+            "name": "input",
+            "message": "",
+            "default": "data/*{1,2}.fastq.gz",
+        }

     @mock.patch("questionary.unsafe_prompt", side_effect=[{"use_web_gui": "Web based"}])
     def test_prompt_web_gui_true(self, mock_prompt):
@@ -123,7 +128,8 @@ def test_launch_web_gui_missing_keys(self, mock_poll_nfcore_web_api):
         assert exc_info.value.args[0].startswith("Web launch response not recognised:")

     @mock.patch(
-        "nf_core.utils.poll_nfcore_web_api", side_effect=[{"api_url": "foo", "web_url": "bar", "status": "recieved"}]
+        "nf_core.utils.poll_nfcore_web_api",
+        side_effect=[{"api_url": "foo", "web_url": "bar", "status": "recieved"}],
     )
     @mock.patch("webbrowser.open")
     @mock.patch("nf_core.utils.wait_cli_function")
@@ -133,7 +139,10 @@ def test_launch_web_gui(self, mock_poll_nfcore_web_api, mock_webbrowser, mock_wa
         self.launcher.merge_nxf_flag_schema()
         assert self.launcher.launch_web_gui() is None

-    @mock.patch("nf_core.utils.poll_nfcore_web_api", side_effect=[{"status": "error", "message": "foo"}])
+    @mock.patch(
+        "nf_core.utils.poll_nfcore_web_api",
+        side_effect=[{"status": "error", "message": "foo"}],
+    )
     def test_get_web_launch_response_error(self, mock_poll_nfcore_web_api):
         """Test polling the website for a launch response - status error"""
         with pytest.raises(AssertionError) as exc_info:
@@ -147,12 +156,18 @@ def test_get_web_launch_response_unexpected(self, mock_poll_nfcore_web_api):
         self.launcher.get_web_launch_response()
         assert exc_info.value.args[0].startswith("Web launch GUI returned unexpected status (foo): ")

-    @mock.patch("nf_core.utils.poll_nfcore_web_api", side_effect=[{"status": "waiting_for_user"}])
+    @mock.patch(
+        "nf_core.utils.poll_nfcore_web_api",
+        side_effect=[{"status": "waiting_for_user"}],
+    )
     def test_get_web_launch_response_waiting(self, mock_poll_nfcore_web_api):
         """Test polling the website for a launch response - status waiting_for_user"""
         assert self.launcher.get_web_launch_response() is False

-    @mock.patch("nf_core.utils.poll_nfcore_web_api", side_effect=[{"status": "launch_params_complete"}])
+    @mock.patch(
+        "nf_core.utils.poll_nfcore_web_api",
+        side_effect=[{"status": "launch_params_complete"}],
+    )
     def test_get_web_launch_response_missing_keys(self, mock_poll_nfcore_web_api):
         """Test polling the website for a launch response - complete, but missing keys"""
         with pytest.raises(AssertionError) as exc_info:
@@ -185,11 +200,9 @@ def test_sanitise_web_response(self):
         self.launcher.get_pipeline_schema()
         self.launcher.nxf_flags["-name"] = ""
         self.launcher.schema_obj.input_params["igenomes_ignore"] = "true"
-        self.launcher.schema_obj.input_params["max_cpus"] = "12"
         self.launcher.sanitise_web_response()
         assert "-name" not in self.launcher.nxf_flags
         assert self.launcher.schema_obj.input_params["igenomes_ignore"] is True
-        assert self.launcher.schema_obj.input_params["max_cpus"] == 12

     def test_ob_to_questionary_bool(self):
         """Check converting a python dict to a pyenquirer format - booleans"""
@@ -262,7 +275,10 @@ def test_ob_to_questionary_enum(self):
     def test_ob_to_questionary_pattern(self):
         """Check converting a python dict to a questionary format - with pattern"""
-        sc_obj = {"type": "string", "pattern": "^([a-zA-Z0-9_\\-\\.]+)@([a-zA-Z0-9_\\-\\.]+)\\.([a-zA-Z]{2,5})$"}
+        sc_obj = {
+            "type": "string",
+            "pattern": "^([a-zA-Z0-9_\\-\\.]+)@([a-zA-Z0-9_\\-\\.]+)\\.([a-zA-Z]{2,5})$",
+        }
         result = self.launcher.single_param_to_questionary("email", sc_obj)
         assert result["type"] == "input"
         assert result["validate"]("test@email.com") is True
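Both the launcher tests above and the schema tests below assume the nf-schema era layout: JSON Schema draft 2020-12, with parameter groups under `$defs` and referenced from `allOf`. A minimal schema in that shape, for orientation (real pipeline schemas carry many more fields):

```python
# Minimal draft 2020-12 layout the updated assertions expect (sketch):
schema = {
    "$schema": "https://json-schema.org/draft/2020-12/schema",
    "$defs": {
        "input_output_options": {
            "properties": {
                "input": {"type": "string"},
                "outdir": {"type": "string", "format": "directory-path"},
            },
        },
    },
    "allOf": [{"$ref": "#/$defs/input_output_options"}],
}
```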
"allOf": [{"$ref": "#/$defs/groupOne"}], } with pytest.raises(AssertionError) as exc_info: self.schema_obj.validate_schema(self.schema_obj.schema) - assert exc_info.value.args[0] == "Definition subschema `groupTwo` not included in schema `allOf`" + assert exc_info.value.args[0] == "Definition subschema `#/$defs/groupTwo` not included in schema `allOf`" def test_validate_schema_fail_unexpected_allof(self): """ Check that the schema validation fails when we an unrecognised definition is in allOf """ self.schema_obj.schema = { - "definitions": {"groupOne": {"properties": {"foo": {}}}, "groupTwo": {"properties": {"bar": {}}}}, + "$schema": "https://json-schema.org/draft/2020-12/schema", + "$defs": {"groupOne": {"properties": {"foo": {}}}, "groupTwo": {"properties": {"bar": {}}}}, "allOf": [ - {"$ref": "#/definitions/groupOne"}, - {"$ref": "#/definitions/groupTwo"}, - {"$ref": "#/definitions/groupThree"}, + {"$ref": "#/$defs/groupOne"}, + {"$ref": "#/$defs/groupTwo"}, + {"$ref": "#/$defs/groupThree"}, ], } with pytest.raises(AssertionError) as exc_info: self.schema_obj.validate_schema(self.schema_obj.schema) - assert exc_info.value.args[0] == "Subschema `groupThree` found in `allOf` but not `definitions`" + assert exc_info.value.args[0] == "Subschema `groupThree` found in `allOf` but not `$defs`" def test_make_skeleton_schema(self): """Test making a new schema skeleton""" @@ -264,7 +270,7 @@ def test_remove_schema_notfound_configs_childschema(self): even when they're in a group """ self.schema_obj.schema = { - "definitions": { + "$defs": { "subSchemaId": { "properties": {"foo": {"type": "string"}, "bar": {"type": "string"}}, "required": ["foo"], @@ -274,8 +280,8 @@ def test_remove_schema_notfound_configs_childschema(self): self.schema_obj.pipeline_params = {"bar": True} self.schema_obj.no_prompts = True params_removed = self.schema_obj.remove_schema_notfound_configs() - assert len(self.schema_obj.schema["definitions"]["subSchemaId"]["properties"]) == 1 - assert "required" not in self.schema_obj.schema["definitions"]["subSchemaId"] + assert len(self.schema_obj.schema["$defs"]["subSchemaId"]["properties"]) == 1 + assert "required" not in self.schema_obj.schema["$defs"]["subSchemaId"] assert len(params_removed) == 1 assert "foo" in params_removed diff --git a/tests/subworkflows/test_lint.py b/tests/subworkflows/test_lint.py index c8b97fd0b0..d94b55b3d3 100644 --- a/tests/subworkflows/test_lint.py +++ b/tests/subworkflows/test_lint.py @@ -22,7 +22,7 @@ def test_subworkflows_lint_empty(self): """Test linting a pipeline with no subworkflows installed""" self.subworkflow_remove.remove("utils_nextflow_pipeline", force=True) self.subworkflow_remove.remove("utils_nfcore_pipeline", force=True) - self.subworkflow_remove.remove("utils_nfvalidation_plugin", force=True) + self.subworkflow_remove.remove("utils_nfschema_plugin", force=True) nf_core.subworkflows.SubworkflowLint(directory=self.pipeline_dir) assert "No subworkflows from https://github.com/nf-core/modules.git installed in pipeline" in self.caplog.text @@ -63,6 +63,14 @@ def test_subworkflows_lint_multiple_remotes(self): assert len(subworkflow_lint.passed) > 0 assert len(subworkflow_lint.warned) >= 0 + def test_subworkflows_lint_update_meta_yml(self): + """update the meta.yml of a subworkflow""" + subworkflow_lint = nf_core.subworkflows.SubworkflowLint(directory=self.nfcore_modules, fix=True) + subworkflow_lint.lint(print_results=False, subworkflow="test_subworkflow") + assert len(subworkflow_lint.failed) == 0, f"Linting failed with {[x.__dict__ for 
diff --git a/tests/subworkflows/test_lint.py b/tests/subworkflows/test_lint.py
index c8b97fd0b0..d94b55b3d3 100644
--- a/tests/subworkflows/test_lint.py
+++ b/tests/subworkflows/test_lint.py
@@ -22,7 +22,7 @@ def test_subworkflows_lint_empty(self):
         """Test linting a pipeline with no subworkflows installed"""
         self.subworkflow_remove.remove("utils_nextflow_pipeline", force=True)
         self.subworkflow_remove.remove("utils_nfcore_pipeline", force=True)
-        self.subworkflow_remove.remove("utils_nfvalidation_plugin", force=True)
+        self.subworkflow_remove.remove("utils_nfschema_plugin", force=True)
         nf_core.subworkflows.SubworkflowLint(directory=self.pipeline_dir)
         assert "No subworkflows from https://github.com/nf-core/modules.git installed in pipeline" in self.caplog.text
@@ -63,6 +63,14 @@ def test_subworkflows_lint_multiple_remotes(self):
         assert len(subworkflow_lint.passed) > 0
         assert len(subworkflow_lint.warned) >= 0

+    def test_subworkflows_lint_update_meta_yml(self):
+        """Update the meta.yml of a subworkflow."""
+        subworkflow_lint = nf_core.subworkflows.SubworkflowLint(directory=self.nfcore_modules, fix=True)
+        subworkflow_lint.lint(print_results=False, subworkflow="test_subworkflow")
+        assert len(subworkflow_lint.failed) == 0, f"Linting failed with {[x.__dict__ for x in subworkflow_lint.failed]}"
+        assert len(subworkflow_lint.passed) > 0
+        assert len(subworkflow_lint.warned) >= 0
+
     def test_subworkflows_lint_snapshot_file(self):
         """Test linting a subworkflow with a snapshot file"""
         subworkflow_lint = nf_core.subworkflows.SubworkflowLint(directory=self.nfcore_modules)
diff --git a/tests/test_cli.py b/tests/test_cli.py
index 026efd1e6a..bea0223f06 100644
--- a/tests/test_cli.py
+++ b/tests/test_cli.py
@@ -167,7 +167,7 @@ def test_cli_download(self, mock_dl):
             "compress": "tar.gz",
             "force": None,
             "platform": None,
-            "download-configuration": None,
+            "download-configuration": "yes",
             "tag": "3.12=testing",
             "container-system": "singularity",
             "container-library": "quay.io",
@@ -188,7 +188,7 @@
             params["compress"],
             "force" in params,
             "platform" in params,
-            "download-configuration" in params,
+            params["download-configuration"],
             (params["tag"],),
             params["container-system"],
             (params["container-library"],),
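test_modules.py below moves the dummy modules repo from PyYAML to a round-tripping ruamel.yaml instance, so that meta.yml edits keep their quoting and indentation before prettier reformats the file. The settings, pulled out for reference:

```python
import ruamel.yaml

# Round-trip configuration used by create_modules_repo_dummy: preserve
# quotes, two-space mappings, and sequences flush with their parent key,
# so a load/edit/dump cycle produces a minimal diff.
yaml = ruamel.yaml.YAML()
yaml.preserve_quotes = True
yaml.indent(mapping=2, sequence=2, offset=0)
```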
diff --git a/tests/test_modules.py b/tests/test_modules.py
index 0e16497176..d0692236e8 100644
--- a/tests/test_modules.py
+++ b/tests/test_modules.py
@@ -7,7 +7,7 @@
 import pytest
 import requests_cache
 import responses
-import yaml
+import ruamel.yaml

 import nf_core.modules
 import nf_core.modules.create
@@ -16,6 +16,7 @@
 import nf_core.modules.remove
 import nf_core.pipelines.create.create
 from nf_core import __version__
+from nf_core.pipelines.lint_utils import run_prettier_on_file
 from nf_core.utils import NFCoreYamlConfig

 from .utils import (
@@ -28,11 +29,15 @@
     create_tmp_pipeline,
     mock_anaconda_api_calls,
     mock_biocontainers_api_calls,
+    mock_biotools_api_calls,
 )


 def create_modules_repo_dummy(tmp_dir):
     """Create a dummy copy of the nf-core/modules repo"""
+    yaml = ruamel.yaml.YAML()
+    yaml.preserve_quotes = True
+    yaml.indent(mapping=2, sequence=2, offset=0)
     root_dir = Path(tmp_dir, "modules")
     Path(root_dir, "modules", "nf-core").mkdir(parents=True)
@@ -42,13 +47,14 @@
     nf_core_yml = NFCoreYamlConfig(nf_core_version=__version__, repository_type="modules", org_path="nf-core")
     with open(Path(root_dir, ".nf-core.yml"), "w") as fh:
         yaml.dump(nf_core_yml.model_dump(), fh)
-    # mock biocontainers and anaconda response
+    # mock biocontainers and anaconda response and biotools response
     with responses.RequestsMock() as rsps:
         mock_anaconda_api_calls(rsps, "bpipe", "0.9.13--hdfd78af_0")
         mock_biocontainers_api_calls(rsps, "bpipe", "0.9.13--hdfd78af_0")
+        mock_biotools_api_calls(rsps, "bpipe")
         # bpipe is a valid package on bioconda that is very unlikely to ever be added to nf-core/modules
         module_create = nf_core.modules.create.ModuleCreate(
-            root_dir, "bpipe/test", "@author", "process_single", False, False
+            root_dir, "bpipe/test", "@author", "process_single", True, False
        )
         with requests_cache.disabled():
             assert module_create.create()
@@ -57,10 +63,11 @@
     meta_yml_path = Path(root_dir, "modules", "nf-core", "bpipe", "test", "meta.yml")
     with open(str(meta_yml_path)) as fh:
-        meta_yml = yaml.safe_load(fh)
+        meta_yml = yaml.load(fh)
     del meta_yml["tools"][0]["bpipe"]["doi"]
     with open(str(meta_yml_path), "w") as fh:
         yaml.dump(meta_yml, fh)
+        run_prettier_on_file(fh.name)

     # Add dummy content to main.nf.test.snap
     test_snap_path = Path(root_dir, "modules", "nf-core", "bpipe", "test", "tests", "main.nf.test.snap")
diff --git a/tests/test_rocrate.py b/tests/test_rocrate.py
new file mode 100644
index 0000000000..6defd5d5e8
--- /dev/null
+++ b/tests/test_rocrate.py
@@ -0,0 +1,83 @@
+"""Test the nf-core pipelines rocrate command"""
+
+import shutil
+import tempfile
+import unittest
+from pathlib import Path
+
+import rocrate.rocrate
+from git import Repo
+
+import nf_core.pipelines.create
+import nf_core.pipelines.create.create
+import nf_core.pipelines.rocrate
+import nf_core.utils
+
+
+class TestROCrate(unittest.TestCase):
+    """Class for RO-Crate tests"""
+
+    def setUp(self):
+        """Function that runs at start of tests for common resources
+
+        Use nf_core.create() to make a pipeline that we can use for testing
+        """
+
+        self.tmp_dir = Path(tempfile.mkdtemp())
+        self.test_pipeline_dir = Path(self.tmp_dir, "nf-core-testpipeline")
+        self.create_obj = nf_core.pipelines.create.create.PipelineCreate(
+            name="testpipeline",
+            description="This is a test pipeline",
+            author="Test McTestFace",
+            outdir=str(self.test_pipeline_dir),
+            version="1.0.0",
+            no_git=False,
+            force=True,
+        )
+        self.create_obj.init_pipeline()
+
+        # add fake metro map
+        Path(self.test_pipeline_dir, "docs", "images", "nf-core-testpipeline_metro_map.png").touch()
+        # commit the changes
+        repo = Repo(self.test_pipeline_dir)
+        repo.git.add(A=True)
+        repo.index.commit("Initial commit")
+
+    def tearDown(self):
+        """Clean up temporary files and folders"""
+
+        if self.tmp_dir.exists():
+            shutil.rmtree(self.tmp_dir)
+
+    def test_rocrate_creation(self):
+        """Run the nf-core rocrate command"""
+
+        # Run the command
+        self.rocrate_obj = nf_core.pipelines.rocrate.ROCrate(self.test_pipeline_dir)
+        self.rocrate_obj.create_rocrate(self.test_pipeline_dir, metadata_path=Path(self.test_pipeline_dir))
+
+        # Check that the crate was created
+        self.assertTrue(Path(self.test_pipeline_dir, "ro-crate-metadata.json").exists())
+
+        # Check that the entries in the crate are correct
+        crate = rocrate.rocrate.ROCrate(self.test_pipeline_dir)
+        entities = crate.get_entities()
+
+        # Check if the correct entities are set:
+        for entity in entities:
+            entity_json = entity.as_jsonld()
+            if entity_json["@id"] == "./":
+                self.assertEqual(entity_json.get("name"), "nf-core/testpipeline")
+                self.assertEqual(entity_json["mainEntity"], {"@id": "main.nf"})
+            elif entity_json["@id"] == "#main.nf":
+                self.assertEqual(entity_json["programmingLanguage"], [{"@id": "#nextflow"}])
+                self.assertEqual(entity_json["image"], [{"@id": "nf-core-testpipeline_metro_map.png"}])
+            # assert there is a metro map
+            # elif entity_json["@id"] == "nf-core-testpipeline_metro_map.png": # FIXME waiting for https://github.com/ResearchObject/ro-crate-py/issues/174
+            #     self.assertEqual(entity_json["@type"], ["File", "ImageObject"])
+            # assert that author is set as a person
+            elif "name" in entity_json and entity_json["name"] == "Test McTestFace":
+                self.assertEqual(entity_json["@type"], "Person")
+                # check that it is set as author of the main entity
+                if crate.mainEntity is not None:
+                    self.assertEqual(crate.mainEntity["author"][0].id, entity_json["@id"])
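The new RO-Crate test reads the crate back with ro-crate-py, and the same inspection works on any generated pipeline crate. A sketch, with a placeholder path:

```python
import rocrate.rocrate

# Load a crate from a directory containing ro-crate-metadata.json and list
# its entities, as test_rocrate_creation does for the test pipeline.
crate = rocrate.rocrate.ROCrate("path/to/pipeline")
for entity in crate.get_entities():
    jsonld = entity.as_jsonld()
    if jsonld["@id"] == "./":
        print(jsonld.get("name"), "->", jsonld.get("mainEntity"))
```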
diff --git a/tests/utils.py b/tests/utils.py
index 1d5a8a115d..022b91227f 100644
--- a/tests/utils.py
+++ b/tests/utils.py
@@ -98,13 +98,22 @@ def mock_biocontainers_api_calls(rsps: responses.RequestsMock, module: str, vers
     rsps.get(biocontainers_api_url, json=biocontainers_mock, status=200)


+def mock_biotools_api_calls(rsps: responses.RequestsMock, module: str) -> None:
+    """Mock biotools api calls for module"""
+    biotools_api_url = f"https://bio.tools/api/t/?q={module}&format=json"
+    biotools_mock = {
+        "list": [{"name": "Bpipe", "biotoolsCURIE": "biotools:bpipe"}],
+    }
+    rsps.get(biotools_api_url, json=biotools_mock, status=200)
+
+
 def create_tmp_pipeline(no_git: bool = False) -> Tuple[Path, Path, str, Path]:
     """Create a new Pipeline for testing"""
     tmp_dir = Path(tempfile.TemporaryDirectory().name)

     root_repo_dir = Path(__file__).resolve().parent.parent
     template_dir = root_repo_dir / "nf_core" / "pipeline-template"
-    pipeline_name = "mypipeline"
+    pipeline_name = "testpipeline"
     pipeline_dir = tmp_dir / pipeline_name
     pipeline_dir.mkdir(parents=True)
@@ -114,7 +123,7 @@ def create_tmp_pipeline(no_git: bool = False) -> Tuple[Path, Path, str, Path]:
         org_path="nf-core",
         lint=None,
         template=NFCoreTemplateConfig(
-            name="mypipeline",
+            name="testpipeline",
             author="me",
             description="it is mine",
             org="nf-core",