diff --git a/.editorconfig b/.editorconfig index 29bf7275..cb1fec9e 100644 --- a/.editorconfig +++ b/.editorconfig @@ -38,10 +38,9 @@ indent_size = unset [/assets/*.Rmd] indent_size = unset -# ignore Readme -[README.md] -indent_style = unset +[/assets/*.Rmd] +indent_size = unset -# ignore python +# ignore python and markdown [*.{py,md}] indent_style = unset diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md index c13f8d82..1c3a0658 100644 --- a/.github/PULL_REQUEST_TEMPLATE.md +++ b/.github/PULL_REQUEST_TEMPLATE.md @@ -18,7 +18,7 @@ Learn more about contributing: [CONTRIBUTING.md](https://github.com/nf-core/airr - [ ] If you've added a new tool - have you followed the pipeline conventions in the [contribution docs](https://github.com/nf-core/airrflow/tree/master/.github/CONTRIBUTING.md) - [ ] If necessary, also make a PR on the nf-core/airrflow _branch_ on the [nf-core/test-datasets](https://github.com/nf-core/test-datasets) repository. - [ ] Make sure your code lints (`nf-core lint`). -- [ ] Ensure the test suite passes (`nf-test test main.nf.test -profile test,docker`). +- [ ] Ensure the test suite passes (`nextflow run . -profile test,docker --outdir `). - [ ] Check for unexpected warnings in debug mode (`nextflow run . -profile debug,test,docker --outdir `). - [ ] Usage Documentation in `docs/usage.md` is updated. - [ ] Output Documentation in `docs/output.md` is updated. 
diff --git a/.github/workflows/awsfulltest.yml b/.github/workflows/awsfulltest.yml index 62ab5695..9c799e4b 100644 --- a/.github/workflows/awsfulltest.yml +++ b/.github/workflows/awsfulltest.yml @@ -8,12 +8,12 @@ on: types: [published] workflow_dispatch: jobs: - run-tower: + run-platform: name: Run AWS full tests if: github.repository == 'nf-core/airrflow' runs-on: ubuntu-latest steps: - - name: Launch workflow via tower + - name: Launch workflow via Seqera Platform uses: seqeralabs/action-tower-launch@v2 with: workspace_id: ${{ secrets.TOWER_WORKSPACE_ID }} @@ -30,7 +30,7 @@ jobs: - uses: actions/upload-artifact@v4 with: - name: Tower debug log file + name: Seqera Platform debug log file path: | - tower_action_*.log - tower_action_*.json + tower_action_*.log + tower_action_*.json diff --git a/.github/workflows/awstest.yml b/.github/workflows/awstest.yml index 869b2ab2..e4d97f6d 100644 --- a/.github/workflows/awstest.yml +++ b/.github/workflows/awstest.yml @@ -5,13 +5,13 @@ name: nf-core AWS test on: workflow_dispatch: jobs: - run-tower: + run-platform: name: Run AWS tests if: github.repository == 'nf-core/airrflow' runs-on: ubuntu-latest steps: - # Launch workflow using Tower CLI tool action - - name: Launch workflow via tower + # Launch workflow using Seqera Platform CLI tool action + - name: Launch workflow via Seqera Platform uses: seqeralabs/action-tower-launch@v2 with: workspace_id: ${{ secrets.TOWER_WORKSPACE_ID }} @@ -27,7 +27,7 @@ jobs: - uses: actions/upload-artifact@v4 with: - name: Tower debug log file + name: Seqera Platform debug log file path: | - tower_action_*.log - tower_action_*.json + tower_action_*.log + tower_action_*.json diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 515482aa..e3ff5e02 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -28,10 +28,10 @@ jobs: - "latest-everything" steps: - name: Check out pipeline code - uses: 
actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4 + uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b # v4 - name: Install Nextflow - uses: nf-core/setup-nextflow@v1 + uses: nf-core/setup-nextflow@v2 with: version: "${{ matrix.NXF_VER }}" diff --git a/.github/workflows/download_pipeline.yml b/.github/workflows/download_pipeline.yml index 08622fd5..2d20d644 100644 --- a/.github/workflows/download_pipeline.yml +++ b/.github/workflows/download_pipeline.yml @@ -14,6 +14,8 @@ on: pull_request: types: - opened + - edited + - synchronize branches: - master pull_request_target: @@ -28,11 +30,14 @@ jobs: runs-on: ubuntu-latest steps: - name: Install Nextflow - uses: nf-core/setup-nextflow@v1 + uses: nf-core/setup-nextflow@v2 - - uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c # v5 + - name: Disk space cleanup + uses: jlumbroso/free-disk-space@54081f138730dfa15788a46383842cd2f914a1be # v1.3.1 + + - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d # v5 with: - python-version: "3.11" + python-version: "3.12" architecture: "x64" - uses: eWaterCycle/setup-singularity@931d4e31109e875b13309ae1d07c70ca8fbc8537 # v7 with: @@ -65,8 +70,17 @@ jobs: - name: Inspect download run: tree ./${{ env.REPOTITLE_LOWERCASE }} - - name: Run the downloaded pipeline + - name: Run the downloaded pipeline (stub) + id: stub_run_pipeline + continue-on-error: true env: NXF_SINGULARITY_CACHEDIR: ./ NXF_SINGULARITY_HOME_MOUNT: true run: nextflow run ./${{ env.REPOTITLE_LOWERCASE }}/$( sed 's/\W/_/g' <<< ${{ env.REPO_BRANCH }}) -stub -profile test,singularity --outdir ./results + - name: Run the downloaded pipeline (stub run not supported) + id: run_pipeline + if: ${{ steps.stub_run_pipeline.outcome == 'failure' }} + env: + NXF_SINGULARITY_CACHEDIR: ./ + NXF_SINGULARITY_HOME_MOUNT: true + run: nextflow run ./${{ env.REPOTITLE_LOWERCASE }}/$( sed 's/\W/_/g' <<< ${{ env.REPO_BRANCH }}) -profile test,singularity --outdir ./results diff --git 
a/.github/workflows/fix-linting.yml b/.github/workflows/fix-linting.yml index 8dda78ab..2261f74f 100644 --- a/.github/workflows/fix-linting.yml +++ b/.github/workflows/fix-linting.yml @@ -13,7 +13,7 @@ jobs: runs-on: ubuntu-latest steps: # Use the @nf-core-bot token to check out so we can push later - - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4 + - uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b # v4 with: token: ${{ secrets.nf_core_bot_auth_token }} @@ -32,9 +32,9 @@ jobs: GITHUB_TOKEN: ${{ secrets.nf_core_bot_auth_token }} # Install and run pre-commit - - uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c # v5 + - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d # v5 with: - python-version: 3.11 + python-version: "3.12" - name: Install pre-commit run: pip install pre-commit diff --git a/.github/workflows/linting.yml b/.github/workflows/linting.yml index 073e1876..1fcafe88 100644 --- a/.github/workflows/linting.yml +++ b/.github/workflows/linting.yml @@ -14,13 +14,12 @@ jobs: pre-commit: runs-on: ubuntu-latest steps: - - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4 + - uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b # v4 - - name: Set up Python 3.11 - uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c # v5 + - name: Set up Python 3.12 + uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d # v5 with: - python-version: 3.11 - cache: "pip" + python-version: "3.12" - name: Install pre-commit run: pip install pre-commit @@ -32,14 +31,14 @@ jobs: runs-on: ubuntu-latest steps: - name: Check out pipeline code - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4 + uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b # v4 - name: Install Nextflow - uses: nf-core/setup-nextflow@v1 + uses: nf-core/setup-nextflow@v2 - - uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c # v5 + - 
uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d # v5 with: - python-version: "3.11" + python-version: "3.12" architecture: "x64" - name: Install dependencies @@ -60,7 +59,7 @@ jobs: - name: Upload linting log file artifact if: ${{ always() }} - uses: actions/upload-artifact@5d5d22a31266ced268874388b861e4b58bb5c2f3 # v4 + uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808 # v4 with: name: linting-logs path: | diff --git a/.github/workflows/linting_comment.yml b/.github/workflows/linting_comment.yml index b706875f..40acc23f 100644 --- a/.github/workflows/linting_comment.yml +++ b/.github/workflows/linting_comment.yml @@ -11,7 +11,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Download lint results - uses: dawidd6/action-download-artifact@f6b0bace624032e30a85a8fd9c1a7f8f611f5737 # v3 + uses: dawidd6/action-download-artifact@09f2f74827fd3a8607589e5ad7f9398816f540fe # v3 with: workflow: linting.yml workflow_conclusion: completed diff --git a/.github/workflows/release-announcements.yml b/.github/workflows/release-announcements.yml index d468aeaa..03ecfcf7 100644 --- a/.github/workflows/release-announcements.yml +++ b/.github/workflows/release-announcements.yml @@ -12,7 +12,7 @@ jobs: - name: get topics and convert to hashtags id: get_topics run: | - curl -s https://nf-co.re/pipelines.json | jq -r '.remote_workflows[] | select(.full_name == "${{ github.repository }}") | .topics[]' | awk '{print "#"$0}' | tr '\n' ' ' >> $GITHUB_OUTPUT + echo "topics=$(curl -s https://nf-co.re/pipelines.json | jq -r '.remote_workflows[] | select(.full_name == "${{ github.repository }}") | .topics[]' | awk '{print "#"$0}' | tr '\n' ' ')" >> $GITHUB_OUTPUT - uses: rzr/fediverse-action@master with: @@ -25,13 +25,13 @@ jobs: Please see the changelog: ${{ github.event.release.html_url }} - ${{ steps.get_topics.outputs.GITHUB_OUTPUT }} #nfcore #openscience #nextflow #bioinformatics + ${{ steps.get_topics.outputs.topics }} #nfcore #openscience #nextflow 
#bioinformatics send-tweet: runs-on: ubuntu-latest steps: - - uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c # v5 + - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d # v5 with: python-version: "3.10" - name: Install dependencies diff --git a/.nf-core.yml b/.nf-core.yml index 59995351..da47f3f0 100644 --- a/.nf-core.yml +++ b/.nf-core.yml @@ -12,3 +12,4 @@ lint: - params.report_logo_img - params.config_profile_url repository_type: pipeline +nf_core_version: "2.14.1" diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index af57081f..4dc0f1dc 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -3,6 +3,9 @@ repos: rev: "v3.1.0" hooks: - id: prettier + additional_dependencies: + - prettier@3.2.5 + - repo: https://github.com/editorconfig-checker/editorconfig-checker.python rev: "2.7.3" hooks: diff --git a/CHANGELOG.md b/CHANGELOG.md index 42b92beb..a9cf4c5e 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -3,6 +3,26 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/) and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0.html). +## [4.1.0] - + +### `Added` + +- [#335](https://github.com/nf-core/airrflow/pull/335) Update to nf-core template 2.14.1 + +### `Fixed` + +- [#335](https://github.com/nf-core/airrflow/pull/335) Fix report number of sequences per sample plots. 
+ +### `Dependencies` + +| Dependency | Old version | New version | +| ---------- | ----------- | ----------- | +| biopython | 1.71 | 1.81 | +| enchantr | 0.1.15 | 0.1.16 | +| scoper | 1.2.1 | 1.3.0 | +| dowser | 1.2.0 | 2.1.0 | +| igphyml | 1.1.5 | 2.0.0 | + ## [4.0] - 2024-04-22 Ascendio ### `Added` diff --git a/README.md b/README.md index ef1fd759..28a2428b 100644 --- a/README.md +++ b/README.md @@ -14,7 +14,7 @@ [![run with conda](http://img.shields.io/badge/run%20with-conda-3EB049?labelColor=000000&logo=anaconda)](https://docs.conda.io/en/latest/) [![run with docker](https://img.shields.io/badge/run%20with-docker-0db7ed?labelColor=000000&logo=docker)](https://www.docker.com/) [![run with singularity](https://img.shields.io/badge/run%20with-singularity-1d355c.svg?labelColor=000000)](https://sylabs.io/docs/) -[![Launch on Seqera Platform](https://img.shields.io/badge/Launch%20%F0%9F%9A%80-Seqera%20Platform-%234256e7)](https://tower.nf/launch?pipeline=https://github.com/nf-core/airrflow) +[![Launch on Seqera Platform](https://img.shields.io/badge/Launch%20%F0%9F%9A%80-Seqera%20Platform-%234256e7)](https://cloud.seqera.io/launch?pipeline=https://github.com/nf-core/airrflow) [![Get help on Slack](http://img.shields.io/badge/slack-nf--core%20%23airrflow-4A154B?labelColor=000000&logo=slack)](https://nfcore.slack.com/channels/airrflow) [![Follow on Twitter](http://img.shields.io/badge/twitter-%40nf__core-1DA1F2?labelColor=000000&logo=twitter)](https://twitter.com/nf_core) [![Follow on Mastodon](https://img.shields.io/badge/mastodon-nf__core-6364ff?labelColor=FFFFFF&logo=mastodon)](https://mstdn.science/@nf_core) diff --git a/assets/repertoire_comparison.Rmd b/assets/repertoire_comparison.Rmd index d3f83e75..b16a9cf4 100644 --- a/assets/repertoire_comparison.Rmd +++ b/assets/repertoire_comparison.Rmd @@ -106,8 +106,9 @@ The full table can be found under [Table_sequences_assembled](repertoire_compari ```{r seq_assembled, echo=FALSE, warning=FALSE, results='asis'} 
tab_seqs_assembled <- read.table("./Table_sequences_assembled.tsv", header=TRUE, sep="\t", check.names = FALSE) +# Splitting on last underscore for sample_id if (any(is.na(tab_seqs_assembled$sample_id))) { - tab_seqs_assembled$sample_id <- sapply(tab_seqs_assembled$file_0, function(x) unlist(strsplit(as.character(x), "_"))[1]) + tab_seqs_assembled$sample_id <- sapply(tab_seqs_assembled$file_0, function(x) unlist(strsplit(as.character(x), "_\\s*(?=[^_]+$)", perl=TRUE))[1]) } dat <- tab_seqs_assembled %>% diff --git a/assets/tutorial/airrflow.sh b/assets/tutorial/airrflow.sh new file mode 100644 index 00000000..34022df7 --- /dev/null +++ b/assets/tutorial/airrflow.sh @@ -0,0 +1,9 @@ +nextflow run nf-core/airrflow -r 4.1.0 \ +-profile docker \ +--mode assembled \ +--input samplesheet.tsv \ +--outdir results \ +-w work \ +--max_cpus 12 \ +--max_memory 12.GB \ +--skip_multiqc diff --git a/assets/tutorial/samplesheet.tsv b/assets/tutorial/samplesheet.tsv new file mode 100644 index 00000000..69ca3c89 --- /dev/null +++ b/assets/tutorial/samplesheet.tsv @@ -0,0 +1,6 @@ +filename species subject_id sample_id day tissue sex age biomaterial_provider pcr_target_locus single_cell +https://zenodo.org/records/11373741/files/AIRR_subject1_FNA_d0_1_Y1.tsv human S1 S1_d0 d0 PMBC NA NA NA IG TRUE +https://zenodo.org/records/11373741/files/AIRR_subject1_FNA_d12_3_Y1.tsv human S1 S1_d12 d12 PMBC NA NA NA IG TRUE +https://zenodo.org/records/11373741/files/AIRR_subject2_FNA_d0_1_Y1.tsv human S2 S2_d0 d0 PMBC NA NA NA IG TRUE +https://zenodo.org/records/11373741/files/AIRR_subject2_FNA_d12_2_Y1.tsv human S2 S2_d12 d12 PMBC NA NA NA IG TRUE + diff --git a/conf/base.config b/conf/base.config index 4f304f7e..6606bdac 100644 --- a/conf/base.config +++ b/conf/base.config @@ -61,7 +61,4 @@ process { cpus = { check_max( 16 * task.attempt, 'cpus' ) } memory = { check_max( 72.GB * task.attempt, 'memory' ) } } - withName:CUSTOM_DUMPSOFTWAREVERSIONS { - cache = false - } } diff --git 
a/conf/modules.config b/conf/modules.config index 3bfdc09f..3dc63fa9 100644 --- a/conf/modules.config +++ b/conf/modules.config @@ -18,14 +18,6 @@ process { saveAs: { filename -> filename.equals('versions.yml') ? null : filename } ] - withName: CUSTOM_DUMPSOFTWAREVERSIONS { - publishDir = [ - path: { "${params.outdir}/pipeline_info" }, - mode: params.publish_dir_mode, - pattern: '*_versions.yml' - ] - } - // Validate input raw withName: SAMPLESHEET_CHECK { publishDir = [ diff --git a/conf/test.config b/conf/test.config index 4d6b1c40..af9618ce 100644 --- a/conf/test.config +++ b/conf/test.config @@ -20,11 +20,11 @@ params { max_time = '6.h' // Input data - input = 'https://raw.githubusercontent.com/nf-core/test-datasets/airrflow/testdata-bcr/Metadata_test_airr.tsv' - cprimers = 'https://raw.githubusercontent.com/nf-core/test-datasets/airrflow/testdata-bcr/C_primers.fasta' - vprimers = 'https://raw.githubusercontent.com/nf-core/test-datasets/airrflow/testdata-bcr/V_primers.fasta' - reference_fasta = 'https://raw.githubusercontent.com/nf-core/test-datasets/airrflow/database-cache/imgtdb_base.zip' - reference_igblast = 'https://raw.githubusercontent.com/nf-core/test-datasets/airrflow/database-cache/igblast_base.zip' + input = pipelines_testdata_base_path + 'testdata-bcr/Metadata_test_airr.tsv' + cprimers = pipelines_testdata_base_path + 'testdata-bcr/C_primers.fasta' + vprimers = pipelines_testdata_base_path + 'testdata-bcr/V_primers.fasta' + reference_fasta = pipelines_testdata_base_path + 'database-cache/imgtdb_base.zip' + reference_igblast = pipelines_testdata_base_path + 'database-cache/igblast_base.zip' mode = 'fastq' diff --git a/conf/test_10x_sc.config b/conf/test_10x_sc.config index 76936ef9..e9b9e5a8 100644 --- a/conf/test_10x_sc.config +++ b/conf/test_10x_sc.config @@ -23,6 +23,6 @@ params { // Input data - input = 'https://raw.githubusercontent.com/nf-core/test-datasets/airrflow/testdata-sc/10x_sc_raw.tsv' - reference_10x = 
'https://raw.githubusercontent.com/nf-core/test-datasets/airrflow/testdata-sc/refdata-cellranger-vdj-GRCh38-alts-ensembl-5.0.0.tar.gz' + input = pipelines_testdata_base_path + 'testdata-sc/10x_sc_raw.tsv' + reference_10x = pipelines_testdata_base_path + 'testdata-sc/refdata-cellranger-vdj-GRCh38-alts-ensembl-5.0.0.tar.gz' } diff --git a/conf/test_assembled_hs.config b/conf/test_assembled_hs.config index bb6caa19..9e3d7f5b 100644 --- a/conf/test_assembled_hs.config +++ b/conf/test_assembled_hs.config @@ -18,9 +18,9 @@ params { // Input data mode = 'assembled' - input = 'https://raw.githubusercontent.com/nf-core/test-datasets/airrflow/testdata-reveal/test_assembled_metadata_hs.tsv' - reference_fasta = 'https://raw.githubusercontent.com/nf-core/test-datasets/airrflow/database-cache/imgtdb_base.zip' - reference_igblast = 'https://raw.githubusercontent.com/nf-core/test-datasets/airrflow/database-cache/igblast_base.zip' + input = pipelines_testdata_base_path + 'testdata-reveal/test_assembled_metadata_hs.tsv' + reference_fasta = pipelines_testdata_base_path + 'database-cache/imgtdb_base.zip' + reference_igblast = pipelines_testdata_base_path + 'database-cache/igblast_base.zip' reassign = true productive_only = true diff --git a/conf/test_assembled_immcantation_devel_hs.config b/conf/test_assembled_immcantation_devel_hs.config index da5c8d56..c842e002 100644 --- a/conf/test_assembled_immcantation_devel_hs.config +++ b/conf/test_assembled_immcantation_devel_hs.config @@ -18,9 +18,9 @@ params { // Input data mode = 'assembled' - input = 'https://raw.githubusercontent.com/nf-core/test-datasets/airrflow/testdata-reveal/test_assembled_metadata_hs.tsv' - reference_fasta = 'https://raw.githubusercontent.com/nf-core/test-datasets/airrflow/database-cache/imgtdb_base.zip' - reference_igblast = 'https://raw.githubusercontent.com/nf-core/test-datasets/airrflow/database-cache/igblast_base.zip' + input = pipelines_testdata_base_path + 'testdata-reveal/test_assembled_metadata_hs.tsv' + 
reference_fasta = pipelines_testdata_base_path + 'database-cache/imgtdb_base.zip' + reference_igblast = pipelines_testdata_base_path + 'database-cache/igblast_base.zip' reassign = true productive_only = true diff --git a/conf/test_assembled_immcantation_devel_mm.config b/conf/test_assembled_immcantation_devel_mm.config index 33fd5bcb..e148abe8 100644 --- a/conf/test_assembled_immcantation_devel_mm.config +++ b/conf/test_assembled_immcantation_devel_mm.config @@ -18,9 +18,9 @@ params { // Input data mode = 'assembled' - input = 'https://raw.githubusercontent.com/nf-core/test-datasets/airrflow/testdata-reveal/test_assembled_metadata_mm.tsv' - reference_fasta = 'https://raw.githubusercontent.com/nf-core/test-datasets/airrflow/database-cache/imgtdb_base.zip' - reference_igblast = 'https://raw.githubusercontent.com/nf-core/test-datasets/airrflow/database-cache/igblast_base.zip' + input = pipelines_testdata_base_path + 'testdata-reveal/test_assembled_metadata_mm.tsv' + reference_fasta = pipelines_testdata_base_path + 'database-cache/imgtdb_base.zip' + reference_igblast = pipelines_testdata_base_path + 'database-cache/igblast_base.zip' reassign = true productive_only = true diff --git a/conf/test_assembled_mm.config b/conf/test_assembled_mm.config index 69ad5052..43e8275e 100644 --- a/conf/test_assembled_mm.config +++ b/conf/test_assembled_mm.config @@ -18,9 +18,9 @@ params { // Input data mode = 'assembled' - input = 'https://raw.githubusercontent.com/nf-core/test-datasets/airrflow/testdata-reveal/test_assembled_metadata_mm.tsv' - reference_fasta = 'https://raw.githubusercontent.com/nf-core/test-datasets/airrflow/database-cache/imgtdb_base.zip' - reference_igblast = 'https://raw.githubusercontent.com/nf-core/test-datasets/airrflow/database-cache/igblast_base.zip' + input = pipelines_testdata_base_path + 'testdata-reveal/test_assembled_metadata_mm.tsv' + reference_fasta = pipelines_testdata_base_path + 'database-cache/imgtdb_base.zip' + reference_igblast = 
pipelines_testdata_base_path + 'database-cache/igblast_base.zip' reassign = true productive_only = true diff --git a/conf/test_clontech_umi.config b/conf/test_clontech_umi.config index 1d64ad1c..e1bf317e 100644 --- a/conf/test_clontech_umi.config +++ b/conf/test_clontech_umi.config @@ -21,10 +21,10 @@ params { max_time = '6.h' // Input data - input = 'https://raw.githubusercontent.com/nf-core/test-datasets/airrflow/testdata-clontech/samplesheet.tsv' + input = pipelines_testdata_base_path +'testdata-clontech/samplesheet.tsv' - reference_fasta = 'https://raw.githubusercontent.com/nf-core/test-datasets/airrflow/database-cache/imgtdb_base.zip' - reference_igblast = 'https://raw.githubusercontent.com/nf-core/test-datasets/airrflow/database-cache/igblast_base.zip' + reference_fasta = pipelines_testdata_base_path + 'database-cache/imgtdb_base.zip' + reference_igblast = pipelines_testdata_base_path + 'database-cache/igblast_base.zip' clonal_threshold = 0.1 diff --git a/conf/test_fetchimgt.config b/conf/test_fetchimgt.config index cc6608f7..468bb52d 100644 --- a/conf/test_fetchimgt.config +++ b/conf/test_fetchimgt.config @@ -20,9 +20,9 @@ params { max_time = '6.h' // Input data - input = 'https://raw.githubusercontent.com/nf-core/test-datasets/airrflow/testdata-bcr/Metadata_test_airr.tsv' - cprimers = 'https://raw.githubusercontent.com/nf-core/test-datasets/airrflow/testdata-bcr/C_primers.fasta' - vprimers = 'https://raw.githubusercontent.com/nf-core/test-datasets/airrflow/testdata-bcr/V_primers.fasta' + input = pipelines_testdata_base_path + 'testdata-bcr/Metadata_test_airr.tsv' + cprimers = pipelines_testdata_base_path + 'testdata-bcr/C_primers.fasta' + vprimers = pipelines_testdata_base_path + 'testdata-bcr/V_primers.fasta' fetch_imgt = true mode = 'fastq' diff --git a/conf/test_full.config b/conf/test_full.config index 0ac79d53..9c652c8f 100644 --- a/conf/test_full.config +++ b/conf/test_full.config @@ -15,11 +15,11 @@ params { config_profile_description = 'Full test 
dataset to check pipeline function' // Input data - input = 'https://raw.githubusercontent.com/nf-core/test-datasets/airrflow/testdata-bcr/metadata_pcr_umi_airr_300.tsv' + input = pipelines_testdata_base_path + 'testdata-bcr/metadata_pcr_umi_airr_300.tsv' cprimers = 's3://ngi-igenomes/test-data/airrflow/pcr_umi/cprimers.fasta' vprimers = 's3://ngi-igenomes/test-data/airrflow/pcr_umi/vprimers.fasta' - reference_fasta = 'https://raw.githubusercontent.com/nf-core/test-datasets/airrflow/database-cache/imgtdb_base.zip' - reference_igblast = 'https://raw.githubusercontent.com/nf-core/test-datasets/airrflow/database-cache/igblast_base.zip' + reference_fasta = pipelines_testdata_base_path + 'database-cache/imgtdb_base.zip' + reference_igblast = pipelines_testdata_base_path + 'database-cache/igblast_base.zip' lineage_trees = true diff --git a/conf/test_nebnext_umi.config b/conf/test_nebnext_umi.config index 76c9bbea..b26fa7cd 100644 --- a/conf/test_nebnext_umi.config +++ b/conf/test_nebnext_umi.config @@ -22,10 +22,10 @@ params { max_time = '6.h' // Input data - input = 'https://raw.githubusercontent.com/nf-core/test-datasets/airrflow/testdata-neb/samplesheet.tsv' + input = pipelines_testdata_base_path + 'testdata-neb/samplesheet.tsv' - reference_fasta = 'https://raw.githubusercontent.com/nf-core/test-datasets/airrflow/database-cache/imgtdb_base.zip' - reference_igblast = 'https://raw.githubusercontent.com/nf-core/test-datasets/airrflow/database-cache/igblast_base.zip' + reference_fasta = pipelines_testdata_base_path + 'database-cache/imgtdb_base.zip' + reference_igblast = pipelines_testdata_base_path + 'database-cache/igblast_base.zip' clonal_threshold = 0.1 diff --git a/conf/test_no_umi.config b/conf/test_no_umi.config index 8800b20c..3a4fad63 100644 --- a/conf/test_no_umi.config +++ b/conf/test_no_umi.config @@ -27,11 +27,11 @@ params { isotype_column = 'c_primer' // Input data - input = 
'https://raw.githubusercontent.com/nf-core/test-datasets/airrflow/testdata-no-umi/Metadata_test-no-umi_airr.tsv' - cprimers = 'https://raw.githubusercontent.com/nf-core/test-datasets/airrflow/testdata-no-umi/Greiff2014_CPrimers.fasta' - vprimers = 'https://raw.githubusercontent.com/nf-core/test-datasets/airrflow/testdata-no-umi/Greiff2014_VPrimers.fasta' - reference_fasta = 'https://raw.githubusercontent.com/nf-core/test-datasets/airrflow/database-cache/imgtdb_base.zip' - reference_igblast = 'https://raw.githubusercontent.com/nf-core/test-datasets/airrflow/database-cache/igblast_base.zip' + input = pipelines_testdata_base_path + 'testdata-no-umi/Metadata_test-no-umi_airr.tsv' + cprimers = pipelines_testdata_base_path + 'testdata-no-umi/Greiff2014_CPrimers.fasta' + vprimers = pipelines_testdata_base_path + 'testdata-no-umi/Greiff2014_VPrimers.fasta' + reference_fasta = pipelines_testdata_base_path + 'database-cache/imgtdb_base.zip' + reference_igblast = pipelines_testdata_base_path + 'database-cache/igblast_base.zip' } diff --git a/conf/test_nocluster.config b/conf/test_nocluster.config index aabccb9b..4a3fb0b7 100644 --- a/conf/test_nocluster.config +++ b/conf/test_nocluster.config @@ -20,11 +20,11 @@ params { max_time = '6.h' // Input data - input = 'https://raw.githubusercontent.com/nf-core/test-datasets/airrflow/testdata-bcr/Metadata_test_airr.tsv' - cprimers = 'https://raw.githubusercontent.com/nf-core/test-datasets/airrflow/testdata-bcr/C_primers.fasta' - vprimers = 'https://raw.githubusercontent.com/nf-core/test-datasets/airrflow/testdata-bcr/V_primers.fasta' - reference_fasta = 'https://raw.githubusercontent.com/nf-core/test-datasets/airrflow/database-cache/imgtdb_base.zip' - reference_igblast = 'https://raw.githubusercontent.com/nf-core/test-datasets/airrflow/database-cache/igblast_base.zip' + input = pipelines_testdata_base_path + 'testdata-bcr/Metadata_test_airr.tsv' + cprimers = pipelines_testdata_base_path + 'testdata-bcr/C_primers.fasta' + vprimers = 
pipelines_testdata_base_path + 'testdata-bcr/V_primers.fasta' + reference_fasta = pipelines_testdata_base_path + 'database-cache/imgtdb_base.zip' + reference_igblast = pipelines_testdata_base_path + 'database-cache/igblast_base.zip' mode = 'fastq' diff --git a/conf/test_raw_immcantation_devel.config b/conf/test_raw_immcantation_devel.config index 11b8ff69..567629a9 100644 --- a/conf/test_raw_immcantation_devel.config +++ b/conf/test_raw_immcantation_devel.config @@ -20,12 +20,12 @@ params { max_time = '6.h' // Input data - input = 'https://raw.githubusercontent.com/nf-core/test-datasets/airrflow/testdata-bcr/Metadata_test_airr.tsv' - cprimers = 'https://raw.githubusercontent.com/nf-core/test-datasets/airrflow/testdata-bcr/C_primers.fasta' - vprimers = 'https://raw.githubusercontent.com/nf-core/test-datasets/airrflow/testdata-bcr/V_primers.fasta' + input = pipelines_testdata_base_path + 'testdata-bcr/Metadata_test_airr.tsv' + cprimers = pipelines_testdata_base_path + 'testdata-bcr/C_primers.fasta' + vprimers = pipelines_testdata_base_path + 'testdata-bcr/V_primers.fasta' - reference_fasta = 'https://raw.githubusercontent.com/nf-core/test-datasets/airrflow/database-cache/imgtdb_base.zip' - reference_igblast = 'https://raw.githubusercontent.com/nf-core/test-datasets/airrflow/database-cache/igblast_base.zip' + reference_fasta = pipelines_testdata_base_path + 'database-cache/imgtdb_base.zip' + reference_igblast = pipelines_testdata_base_path + 'database-cache/igblast_base.zip' mode = 'fastq' diff --git a/conf/test_tcr.config b/conf/test_tcr.config index 5af84ee7..7572f099 100644 --- a/conf/test_tcr.config +++ b/conf/test_tcr.config @@ -28,11 +28,11 @@ params { // Input data - input = 'https://raw.githubusercontent.com/nf-core/test-datasets/airrflow/testdata-tcr/TCR_metadata_airr.tsv' - cprimers = 'https://raw.githubusercontent.com/nf-core/test-datasets/airrflow/testdata-tcr/cprimers.fasta' - race_linker = 
'https://raw.githubusercontent.com/nf-core/test-datasets/airrflow/testdata-tcr/linker.fasta' - reference_fasta = 'https://raw.githubusercontent.com/nf-core/test-datasets/airrflow/database-cache/imgtdb_base.zip' - reference_igblast = 'https://raw.githubusercontent.com/nf-core/test-datasets/airrflow/database-cache/igblast_base.zip' + input = pipelines_testdata_base_path + 'testdata-tcr/TCR_metadata_airr.tsv' + cprimers = pipelines_testdata_base_path + 'testdata-tcr/cprimers.fasta' + race_linker = pipelines_testdata_base_path + 'testdata-tcr/linker.fasta' + reference_fasta = pipelines_testdata_base_path + 'database-cache/imgtdb_base.zip' + reference_igblast = pipelines_testdata_base_path + 'database-cache/igblast_base.zip' } diff --git a/docs/usage.md b/docs/usage.md index 69b7696b..c3844547 100644 --- a/docs/usage.md +++ b/docs/usage.md @@ -14,28 +14,27 @@ The nf-core/airrflow pipeline allows processing BCR and TCR targeted sequencing ### Quickstart -A typical command for running the pipeline for **bulk raw fastq files** is: +A typical command for running the pipeline for **bulk raw fastq files** using available pre-set protocol profiles is shown below. The full list of supported profiles can be found in the section [Supported protocol profiles](#supported-protocol-profiles). ```bash nextflow run nf-core/airrflow \ --profile \ ---mode fastq \ +-profile nebnext_umi_bcr,docker \ --input input_samplesheet.tsv \ ---library_generation_method specific_pcr_umi \ ---cprimers CPrimers.fasta \ ---vprimers VPrimers.fasta \ ---umi_length 12 \ ---umi_position R1 \ --outdir results ``` -You can optionally set a protocol profile if you're running the pipeline with data from one of the supported profiles. The full list of supported profiles can be found in the section [Supported protocol profiles](#supported-protocol-profiles). 
An example command running the NEBNext UMI protocol profile with docker containers is: +It is also possible to process custom sequencing protocols with custom primers by manually specifying the primers, UMI length (if available) and position: ```bash nextflow run nf-core/airrflow \ --profile nebnext_umi,docker \ +-profile \ --mode fastq \ --input input_samplesheet.tsv \ +--library_generation_method specific_pcr_umi \ +--cprimers CPrimers.fasta \ +--vprimers VPrimers.fasta \ +--umi_length 12 \ +--umi_position R1 \ --outdir results ``` @@ -93,7 +92,6 @@ with `params.yaml` containing: ```yaml input: './samplesheet.csv' outdir: './results/' -genome: 'GRCh37' <...> ``` @@ -230,18 +228,29 @@ This profile executes the commands based on the pRESTO pre-set pipeline [presto- - Align and annotate the internal C Region (for the BCR specific protocol) for a more specific isotype annotation. - Remove duplicate sequences and filter to sequences with at least 2 supporting sources. -Please note that the default primer sequences and internal CRegion sequences are for human. If you wish to run this protocol on mouse or other species, please provide the alternative primers: +Please note that the default primer sequences and internal CRegion sequences are for human. If you wish to run this protocol on mouse or other species, please provide the alternative primers. 
Here is an example using the mouse IG primers from the Immcantation Bitbucket repository: ```bash nextflow run nf-core/airrflow -r \ -profile nebnext_umi_bcr,docker \ --input input_samplesheet.tsv \ ---cprimers \ ---internal_cregion_sequences \ +--cprimers https://bitbucket.org/kleinstein/immcantation/raw/354f49228a43b4c2858d67fb09886126b314e317/protocols/AbSeq/AbSeq_R1_Mouse_IG_Primers.fasta \ +--internal_cregion_sequences https://bitbucket.org/kleinstein/immcantation/raw/354f49228a43b4c2858d67fb09886126b314e317/protocols/AbSeq/AbSeq_Mouse_IG_InternalCRegion.fasta \ --outdir results ``` -### Clontech / Takara SMARTer Human BCR Profiling kit +And similarly for TCR libraries: + +```bash +nextflow run nf-core/airrflow -r \ +-profile nebnext_umi_bcr,docker \ +--input input_samplesheet.tsv \ +--cprimers https://bitbucket.org/kleinstein/immcantation/raw/354f49228a43b4c2858d67fb09886126b314e317/protocols/AbSeq/AbSeq_R1_Mouse_TR_Primers.fasta \ +--internal_cregion_sequences https://bitbucket.org/kleinstein/immcantation/raw/354f49228a43b4c2858d67fb09886126b314e317/protocols/AbSeq/AbSeq_Mouse_TR_InternalCRegion.fasta \ +--outdir results +``` + +### Clontech / Takara SMARTer Human BCR/TCR Profiling kit - [TaKaRa SMARTer Human BCR kit](https://www.takarabio.com/products/next-generation-sequencing/immune-profiling/human-repertoire/human-bcr-profiling-kit-for-illumina-sequencing) @@ -266,13 +275,23 @@ This profile executes the sequence assembly commands based on the pRESTO pre-set After the sequence assembly steps, the remaining steps are common for all protocols. -Please note that the default primer sequences and internal CRegion sequences are for human. If you wish to run this protocol on mouse or other species, please provide the alternative primer sequences: +Please note that the default primer sequences and internal CRegion sequences are for human. If you wish to run this protocol on mouse or other species, please provide the alternative primer sequences. 
Here is an example using the mouse IG primers from the Immcantation Bitbucket repository: ```bash nextflow run nf-core/airrflow -r \ -profile clontech_umi_bcr,docker \ --input input_samplesheet.tsv \ ---cprimers \ +--cprimers https://bitbucket.org/kleinstein/immcantation/raw/c98269b194e9c6262fe3b098be3600ba7f64b85c/protocols/Universal/Mouse_IG_CRegion_RC.fasta \ +--outdir results +``` + +And for TCR data: + +```bash +nextflow run nf-core/airrflow -r \ +-profile clontech_umi_tcr,docker \ +--input input_samplesheet.tsv \ +--cprimers https://bitbucket.org/kleinstein/immcantation/raw/c98269b194e9c6262fe3b098be3600ba7f64b85c/protocols/Universal/Mouse_TR_CRegion_RC.fasta \ --outdir results ``` @@ -281,18 +300,18 @@ nextflow run nf-core/airrflow -r \ When processing bulk sequencing data departing from raw `fastq` reads, several sequencing protocols are supported which can be provided with the parameter `--library_generation_method`. The following table matches the library generation methods as described in the [AIRR metadata annotation guidelines](https://docs.airr-community.org/en/stable/miairr/metadata_guidelines.html#library-generation-method) to the value that can be provided to the `--library_generation_method` parameter. -| Library generation methods (AIRR) | Description | Name in pipeline | Commercial protocols | -| --------------------------------- | ------------------------------------------------------------------------------------------ | ---------------- | ----------------------------------------- | -| RT(RHP)+PCR | RT-PCR using random hexamer primers | Not supported | | -| RT(oligo-dT)+PCR | RT-PCR using oligo-dT primers | Not supported | | -| RT(oligo-dT)+TS+PCR | 5’-RACE PCR (i.e. 
RT is followed by a template switch (TS) step) using oligo-dT primers | dt_5p_race | | -| RT(oligo-dT)+TS(UMI)+PCR | 5’-RACE PCR using oligo-dT primers and template switch primers containing UMI | dt_5p_race_umi | TAKARA SMARTer TCR v2, TAKARA SMARTer BCR | -| RT(specific)+PCR | RT-PCR using transcript-specific primers | specific_pcr | | -| RT(specific)+TS+PCR | 5’-RACE PCR using transcript- specific primers | Not supported | | -| RT(specific)+TS(UMI)+PCR | 5’-RACE PCR using transcript- specific primers and template switch primers containing UMIs | Not supported | | -| RT(specific+UMI)+PCR | RT-PCR using transcript-specific primers containing UMIs | specific_pcr_umi | | -| RT(specific+UMI)+TS+PCR | 5’-RACE PCR using transcript- specific primers containing UMIs | Not supported | | -| RT(specific)+TS | RT-based generation of dsDNA without subsequent PCR. This is used by RNA-seq kits. | Not supported | | +| Library generation methods (AIRR) | Description | Name in pipeline | +| --------------------------------- | ------------------------------------------------------------------------------------------ | ---------------- | +| RT(RHP)+PCR | RT-PCR using random hexamer primers | Not supported | +| RT(oligo-dT)+PCR | RT-PCR using oligo-dT primers | Not supported | +| RT(oligo-dT)+TS+PCR | 5’-RACE PCR (i.e. 
RT is followed by a template switch (TS) step) using oligo-dT primers | dt_5p_race | +| RT(oligo-dT)+TS(UMI)+PCR | 5’-RACE PCR using oligo-dT primers and template switch primers containing UMI | dt_5p_race_umi | +| RT(specific)+PCR | RT-PCR using transcript-specific primers | specific_pcr | +| RT(specific)+TS+PCR | 5’-RACE PCR using transcript- specific primers | Not supported | +| RT(specific)+TS(UMI)+PCR | 5’-RACE PCR using transcript- specific primers and template switch primers containing UMIs | Not supported | +| RT(specific+UMI)+PCR | RT-PCR using transcript-specific primers containing UMIs | specific_pcr_umi | +| RT(specific+UMI)+TS+PCR | 5’-RACE PCR using transcript- specific primers containing UMIs | Not supported | +| RT(specific)+TS | RT-based generation of dsDNA without subsequent PCR. This is used by RNA-seq kits. | Not supported | ### Multiplex specific PCR (with or without UMI) @@ -405,68 +424,6 @@ nextflow run nf-core/airrflow -profile docker \ --outdir ./results ``` -### dT-Oligo RT and 5'RACE PCR - -This sequencing type requires setting `--library_generation_method race_5p_umi` or `--library_generation_method race_5p_umi` if UMIs are not being employed, and providing sequences for the C-region primers as well as the linker or template switch oligo sequences with the parameter `--race_linker`. Examples are provided below to run airrflow to process amplicons generated with the TAKARA 5'RACE SMARTer Human BCR and TCR protocols (library structure schema shown below). 
- -#### Takara Bio SMARTer Human BCR - -The read configuration when sequencing with the TAKARA Bio SMARTer Human BCR protocol is the following: - -![nf-core/airrflow](images/TAKARA_RACE_BCR.png) - -```bash -nextflow run nf-core/airrflow -profile docker \ ---input samplesheet.tsv \ ---library_generation_method dt_5p_race_umi \ ---cprimers CPrimers.fasta \ ---race_linker linker.fasta \ ---umi_length 12 \ ---umi_position R2 \ ---cprimer_start 7 \ ---cprimer_position R1 \ ---outdir ./results -``` - -#### Takara Bio SMARTer Human TCR v2 - -The read configuration when sequencing with the Takara Bio SMARTer Human TCR v2 protocol is the following: - -![nf-core/airrflow](images/TAKARA_RACE_TCR.png) - -```bash -nextflow run nf-core/airrflow -profile docker \ ---input samplesheet.tsv \ ---library_generation_method dt_5p_race_umi \ ---cprimers CPrimers.fasta \ ---race_linker linker.fasta \ ---umi_length 12 \ ---umi_position R2 \ ---cprimer_start 5 \ ---cprimer_position R1 \ ---outdir ./results -``` - -For this protocol, the takara linkers are: - -```txt ->takara-linker -GTAC -``` - -And the C-region primers are: - -```txt ->TRAC -CAGGGTCAGGGTTCTGGATATN ->TRBC -GGAACACSTTKTTCAGGTCCTC ->TRDC -GTTTGGTATGAGGCTGACTTCN ->TRGC -CATCTGCATCAAGTTGTTTATC -``` - ## UMI barcode handling Unique Molecular Identifiers (UMIs) enable the quantification of BCR or TCR abundance in the original sample by allowing to distinguish PCR duplicates from original sample duplicates. @@ -549,6 +506,8 @@ If `-profile` is not specified, the pipeline will run locally and expect all sof - A generic configuration profile to be used with [Charliecloud](https://hpc.github.io/charliecloud/) - `apptainer` - A generic configuration profile to be used with [Apptainer](https://apptainer.org/) +- `wave` + - A generic configuration profile to enable [Wave](https://seqera.io/wave/) containers. Use together with one of the above (requires Nextflow ` 24.03.0-edge` or later). 
- `conda` - A generic configuration profile to be used with [Conda](https://conda.io/docs/). Please only use Conda as a last resort i.e. when it's not possible to run the pipeline with Docker, Singularity, Podman, Shifter, Charliecloud, or Apptainer. diff --git a/modules.json b/modules.json index 9350ace5..3a6e053c 100644 --- a/modules.json +++ b/modules.json @@ -27,7 +27,7 @@ }, "fastqc": { "branch": "master", - "git_sha": "f4ae1d942bd50c5c0b9bd2de1393ce38315ba57c", + "git_sha": "285a50500f9e02578d90b3ce6382ea3c30216acd", "installed_by": ["modules"] }, "multiqc": { @@ -46,7 +46,7 @@ }, "utils_nfcore_pipeline": { "branch": "master", - "git_sha": "5caf7640a9ef1d18d765d55339be751bb0969dfa", + "git_sha": "92de218a329bfc9a9033116eb5f65fd270e72ba3", "installed_by": ["subworkflows"] }, "utils_nfvalidation_plugin": { diff --git a/modules/local/airrflow_report/airrflow_report.nf b/modules/local/airrflow_report/airrflow_report.nf index b4422153..990e492e 100644 --- a/modules/local/airrflow_report/airrflow_report.nf +++ b/modules/local/airrflow_report/airrflow_report.nf @@ -5,9 +5,7 @@ process AIRRFLOW_REPORT { if (workflow.profile.tokenize(',').intersect(['conda', 'mamba']).size() >= 1) { error "nf-core/airrflow currently does not support Conda. Please use a container profile instead." } - container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
- 'docker.io/immcantation/airrflow:4.0.0': - 'docker.io/immcantation/airrflow:4.0.0' }" + container "docker.io/immcantation/airrflow:4.1.0" input: tuple val(meta), path(tab) // sequence tsv table in AIRR format diff --git a/modules/local/enchantr/collapse_duplicates.nf b/modules/local/enchantr/collapse_duplicates.nf index 903824fe..618491ab 100644 --- a/modules/local/enchantr/collapse_duplicates.nf +++ b/modules/local/enchantr/collapse_duplicates.nf @@ -7,9 +7,7 @@ process COLLAPSE_DUPLICATES { if (workflow.profile.tokenize(',').intersect(['conda', 'mamba']).size() >= 1) { error "nf-core/airrflow currently does not support Conda. Please use a container profile instead." } - container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? - 'docker.io/immcantation/airrflow:4.0.0': - 'docker.io/immcantation/airrflow:4.0.0' }" + container "docker.io/immcantation/airrflow:4.1.0" input: tuple val(meta), path(tabs) // tuple [val(meta), sequence tsv in AIRR format ] diff --git a/modules/local/enchantr/define_clones.nf b/modules/local/enchantr/define_clones.nf index 64b8e7df..ae71b900 100644 --- a/modules/local/enchantr/define_clones.nf +++ b/modules/local/enchantr/define_clones.nf @@ -24,9 +24,7 @@ process DEFINE_CLONES { if (workflow.profile.tokenize(',').intersect(['conda', 'mamba']).size() >= 1) { error "nf-core/airrflow currently does not support Conda. Please use a container profile instead." } - container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
- 'docker.io/immcantation/airrflow:4.0.0': - 'docker.io/immcantation/airrflow:4.0.0' }" + container "docker.io/immcantation/airrflow:4.1.0" input: tuple val(meta), path(tabs) // meta, sequence tsv in AIRR format diff --git a/modules/local/enchantr/detect_contamination.nf b/modules/local/enchantr/detect_contamination.nf index aae3ef92..cec4deca 100644 --- a/modules/local/enchantr/detect_contamination.nf +++ b/modules/local/enchantr/detect_contamination.nf @@ -8,9 +8,7 @@ process DETECT_CONTAMINATION { if (workflow.profile.tokenize(',').intersect(['conda', 'mamba']).size() >= 1) { error "nf-core/airrflow currently does not support Conda. Please use a container profile instead." } - container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? - 'docker.io/immcantation/airrflow:4.0.0': - 'docker.io/immcantation/airrflow:4.0.0' }" + container "docker.io/immcantation/airrflow:4.1.0" input: path(tabs) diff --git a/modules/local/enchantr/dowser_lineages.nf b/modules/local/enchantr/dowser_lineages.nf index 03444f19..0559c496 100644 --- a/modules/local/enchantr/dowser_lineages.nf +++ b/modules/local/enchantr/dowser_lineages.nf @@ -24,9 +24,7 @@ process DOWSER_LINEAGES { if (workflow.profile.tokenize(',').intersect(['conda', 'mamba']).size() >= 1) { error "nf-core/airrflow currently does not support Conda. Please use a container profile instead." } - container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
- 'docker.io/immcantation/airrflow:4.0.0': - 'docker.io/immcantation/airrflow:4.0.0' }" + container "docker.io/immcantation/airrflow:4.1.0" input: tuple val(meta), path(tabs) diff --git a/modules/local/enchantr/find_threshold.nf b/modules/local/enchantr/find_threshold.nf index 8632e081..4a9b0ab0 100644 --- a/modules/local/enchantr/find_threshold.nf +++ b/modules/local/enchantr/find_threshold.nf @@ -24,9 +24,7 @@ process FIND_THRESHOLD { if (workflow.profile.tokenize(',').intersect(['conda', 'mamba']).size() >= 1) { error "nf-core/airrflow currently does not support Conda. Please use a container profile instead." } - container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? - 'docker.io/immcantation/airrflow:4.0.0': - 'docker.io/immcantation/airrflow:4.0.0' }" + container "docker.io/immcantation/airrflow:4.1.0" input: diff --git a/modules/local/enchantr/remove_chimeric.nf b/modules/local/enchantr/remove_chimeric.nf index 94805169..2df7e60c 100644 --- a/modules/local/enchantr/remove_chimeric.nf +++ b/modules/local/enchantr/remove_chimeric.nf @@ -8,9 +8,7 @@ process REMOVE_CHIMERIC { if (workflow.profile.tokenize(',').intersect(['conda', 'mamba']).size() >= 1) { error "nf-core/airrflow currently does not support Conda. Please use a container profile instead." } - container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? - 'docker.io/immcantation/airrflow:4.0.0': - 'docker.io/immcantation/airrflow:4.0.0' }" + container "docker.io/immcantation/airrflow:4.1.0" input: diff --git a/modules/local/enchantr/report_file_size.nf b/modules/local/enchantr/report_file_size.nf index 4fc4c3fa..093a9e61 100644 --- a/modules/local/enchantr/report_file_size.nf +++ b/modules/local/enchantr/report_file_size.nf @@ -9,9 +9,7 @@ process REPORT_FILE_SIZE { if (workflow.profile.tokenize(',').intersect(['conda', 'mamba']).size() >= 1) { error "nf-core/airrflow currently does not support Conda. 
Please use a container profile instead." } - container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? - 'docker.io/immcantation/airrflow:4.0.0': - 'docker.io/immcantation/airrflow:4.0.0' }" + container "docker.io/immcantation/airrflow:4.1.0" input: path logs diff --git a/modules/local/enchantr/single_cell_qc.nf b/modules/local/enchantr/single_cell_qc.nf index 49e97796..4170e51b 100644 --- a/modules/local/enchantr/single_cell_qc.nf +++ b/modules/local/enchantr/single_cell_qc.nf @@ -23,9 +23,7 @@ process SINGLE_CELL_QC { if (workflow.profile.tokenize(',').intersect(['conda', 'mamba']).size() >= 1) { error "nf-core/airrflow currently does not support Conda. Please use a container profile instead." } - container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? - 'docker.io/immcantation/airrflow:4.0.0': - 'docker.io/immcantation/airrflow:4.0.0' }" + container "docker.io/immcantation/airrflow:4.1.0" input: path(tabs) diff --git a/modules/local/enchantr/validate_input.nf b/modules/local/enchantr/validate_input.nf index db8ab075..6ec12543 100644 --- a/modules/local/enchantr/validate_input.nf +++ b/modules/local/enchantr/validate_input.nf @@ -9,9 +9,7 @@ process VALIDATE_INPUT { if (workflow.profile.tokenize(',').intersect(['conda', 'mamba']).size() >= 1) { error "nf-core/airrflow currently does not support Conda. Please use a container profile instead." } - container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
- 'docker.io/immcantation/airrflow:4.0.0': - 'docker.io/immcantation/airrflow:4.0.0' }" + container "docker.io/immcantation/airrflow:4.1.0" input: file samplesheet diff --git a/modules/local/merge_UMI.nf b/modules/local/merge_UMI.nf index a92ef4fe..a90957b1 100644 --- a/modules/local/merge_UMI.nf +++ b/modules/local/merge_UMI.nf @@ -3,10 +3,10 @@ process MERGE_UMI { tag "$meta.id" label 'process_low' - conda "conda-forge::python=3.8.0 conda-forge::biopython=1.74" + conda "conda-forge::biopython=1.81" container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? - 'https://depot.galaxyproject.org/singularity/mulled-v2-adc9bb9edc31eb38b3c24786a83b7dfa530e2bea:47d6d7765d7537847ced7dac873190d164146022-0' : - 'biocontainers/mulled-v2-adc9bb9edc31eb38b3c24786a83b7dfa530e2bea:47d6d7765d7537847ced7dac873190d164146022-0' }" + 'https://depot.galaxyproject.org/singularity/biopython:1.81' : + 'biocontainers/biopython:1.81' }" input: tuple val(meta), path(R1), path(R2), path(I1) diff --git a/modules/local/rename_fastq.nf b/modules/local/rename_fastq.nf index 012f8f0b..0b8d655b 100644 --- a/modules/local/rename_fastq.nf +++ b/modules/local/rename_fastq.nf @@ -3,10 +3,10 @@ process RENAME_FASTQ { tag "$meta.id" label 'process_low' - conda "conda-forge::python=3.8.0 conda-forge::biopython=1.74" + conda "conda-forge::biopython=1.81" container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
- 'https://depot.galaxyproject.org/singularity/mulled-v2-adc9bb9edc31eb38b3c24786a83b7dfa530e2bea:47d6d7765d7537847ced7dac873190d164146022-0' : - 'biocontainers/mulled-v2-adc9bb9edc31eb38b3c24786a83b7dfa530e2bea:47d6d7765d7537847ced7dac873190d164146022-0' }" + 'https://depot.galaxyproject.org/singularity/biopython:1.81' : + 'biocontainers/biopython:1.81' }" input: tuple val(meta), path(R1), path(R2) diff --git a/modules/local/rename_file.nf b/modules/local/rename_file.nf index 6f99ef89..786d93e5 100644 --- a/modules/local/rename_file.nf +++ b/modules/local/rename_file.nf @@ -3,19 +3,19 @@ process RENAME_FILE { tag "$meta.id" label 'process_low' - conda "conda-forge::python=3.8.0 conda-forge::biopython=1.74" + conda "conda-forge::biopython=1.81" container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? - 'https://depot.galaxyproject.org/singularity/mulled-v2-adc9bb9edc31eb38b3c24786a83b7dfa530e2bea:47d6d7765d7537847ced7dac873190d164146022-0' : - 'biocontainers/mulled-v2-adc9bb9edc31eb38b3c24786a83b7dfa530e2bea:47d6d7765d7537847ced7dac873190d164146022-0' }" + 'https://depot.galaxyproject.org/singularity/biopython:1.81' : + 'biocontainers/biopython:1.81' }" input: tuple val(meta), path(file) output: - tuple val(meta), path("${meta.id}_${file.name}") , emit: file + tuple val(meta), path("${meta.id}.${file.extension}") , emit: file script: """ - mv ${file} ${meta.id}_${file.name} + mv ${file} ${meta.id}.${file.extension} """ } diff --git a/modules/local/reveal/add_meta_to_tab.nf b/modules/local/reveal/add_meta_to_tab.nf index 8413cebc..1c50d60e 100644 --- a/modules/local/reveal/add_meta_to_tab.nf +++ b/modules/local/reveal/add_meta_to_tab.nf @@ -6,9 +6,7 @@ process ADD_META_TO_TAB { if (workflow.profile.tokenize(',').intersect(['conda', 'mamba']).size() >= 1) { error "nf-core/airrflow currently does not support Conda. Please use a container profile instead." 
} - container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? - 'docker.io/immcantation/airrflow:4.0.0': - 'docker.io/immcantation/airrflow:4.0.0' }" + container "docker.io/immcantation/airrflow:4.1.0" cache 'deep' // Without 'deep' this process would run when using -resume diff --git a/modules/local/reveal/filter_junction_mod3.nf b/modules/local/reveal/filter_junction_mod3.nf index f792aca2..92ef4833 100644 --- a/modules/local/reveal/filter_junction_mod3.nf +++ b/modules/local/reveal/filter_junction_mod3.nf @@ -6,9 +6,7 @@ process FILTER_JUNCTION_MOD3 { if (workflow.profile.tokenize(',').intersect(['conda', 'mamba']).size() >= 1) { error "nf-core/airrflow currently does not support Conda. Please use a container profile instead." } - container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? - 'docker.io/immcantation/airrflow:4.0.0': - 'docker.io/immcantation/airrflow:4.0.0' }" + container "docker.io/immcantation/airrflow:4.1.0" input: tuple val(meta), path(tab) // sequence tsv in AIRR format diff --git a/modules/local/reveal/filter_quality.nf b/modules/local/reveal/filter_quality.nf index aa803279..4675ed17 100644 --- a/modules/local/reveal/filter_quality.nf +++ b/modules/local/reveal/filter_quality.nf @@ -6,9 +6,7 @@ process FILTER_QUALITY { if (workflow.profile.tokenize(',').intersect(['conda', 'mamba']).size() >= 1) { error "nf-core/airrflow currently does not support Conda. Please use a container profile instead." } - container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
- 'docker.io/immcantation/airrflow:4.0.0': - 'docker.io/immcantation/airrflow:4.0.0' }" + container "docker.io/immcantation/airrflow:4.1.0" input: tuple val(meta), path(tab) // sequence tsv in AIRR format diff --git a/modules/nf-core/fastqc/main.nf b/modules/nf-core/fastqc/main.nf index 9e19a74c..d79f1c86 100644 --- a/modules/nf-core/fastqc/main.nf +++ b/modules/nf-core/fastqc/main.nf @@ -25,6 +25,11 @@ process FASTQC { def old_new_pairs = reads instanceof Path || reads.size() == 1 ? [[ reads, "${prefix}.${reads.extension}" ]] : reads.withIndex().collect { entry, index -> [ entry, "${prefix}_${index + 1}.${entry.extension}" ] } def rename_to = old_new_pairs*.join(' ').join(' ') def renamed_files = old_new_pairs.collect{ old_name, new_name -> new_name }.join(' ') + + def memory_in_mb = MemoryUnit.of("${task.memory}").toUnit('MB') + // FastQC memory value allowed range (100 - 10000) + def fastqc_memory = memory_in_mb > 10000 ? 10000 : (memory_in_mb < 100 ? 100 : memory_in_mb) + """ printf "%s %s\\n" $rename_to | while read old_name new_name; do [ -f "\${new_name}" ] || ln -s \$old_name \$new_name @@ -33,6 +38,7 @@ process FASTQC { fastqc \\ $args \\ --threads $task.cpus \\ + --memory $fastqc_memory \\ $renamed_files cat <<-END_VERSIONS > versions.yml diff --git a/nextflow.config b/nextflow.config index 00c53278..e51ff966 100644 --- a/nextflow.config +++ b/nextflow.config @@ -14,7 +14,7 @@ params { mode = "fastq" miairr="$projectDir/assets/reveal/mapping_MiAIRR_BioSample_v1.3.1.tsv" index_file = false - + pipelines_testdata_base_path = 'https://raw.githubusercontent.com/nf-core/test-datasets/airrflow/' // ---------------------------- // sequencing protocol options @@ -81,8 +81,8 @@ params { // ----------------------- productive_only = true reassign = true - reference_igblast = 'https://raw.githubusercontent.com/nf-core/test-datasets/airrflow/database-cache/igblast_base.zip' - reference_fasta = 
'https://raw.githubusercontent.com/nf-core/test-datasets/airrflow/database-cache/imgtdb_base.zip' + reference_igblast = "${params.pipelines_testdata_base_path}database-cache/igblast_base.zip" + reference_fasta = "${params.pipelines_testdata_base_path}database-cache/imgtdb_base.zip" fetch_imgt = false save_databases = true isotype_column = 'c_call' @@ -141,15 +141,15 @@ params { multiqc_methods_description = null // Boilerplate options - outdir = null - publish_dir_mode = 'copy' - email = null - email_on_fail = null - plaintext_email = false - monochrome_logs = false - hook_url = null - help = false - version = false + outdir = null + publish_dir_mode = 'copy' + email = null + email_on_fail = null + plaintext_email = false + monochrome_logs = false + hook_url = null + help = false + version = false // Config options config_profile_name = null @@ -185,51 +185,50 @@ try { } // Load nf-core/airrflow custom profiles from different institutions. -// Warning: Uncomment only if a pipeline-specific institutional config already exists on nf-core/configs! 
-// try { -// includeConfig "${params.custom_config_base}/pipeline/airrflow.config" -// } catch (Exception e) { -// System.err.println("WARNING: Could not load nf-core/config/airrflow profiles: ${params.custom_config_base}/pipeline/airrflow.config") -// } +try { + includeConfig "${params.custom_config_base}/pipeline/airrflow.config" +} catch (Exception e) { + System.err.println("WARNING: Could not load nf-core/config/airrflow profiles: ${params.custom_config_base}/pipeline/airrflow.config") +} profiles { debug { - dumpHashes = true - process.beforeScript = 'echo $HOSTNAME' - cleanup = false + dumpHashes = true + process.beforeScript = 'echo $HOSTNAME' + cleanup = false nextflow.enable.configProcessNamesValidation = true } conda { - conda.enabled = true - docker.enabled = false - singularity.enabled = false - podman.enabled = false - shifter.enabled = false - charliecloud.enabled = false - channels = ['conda-forge', 'bioconda', 'defaults'] - apptainer.enabled = false + conda.enabled = true + docker.enabled = false + singularity.enabled = false + podman.enabled = false + shifter.enabled = false + charliecloud.enabled = false + conda.channels = ['conda-forge', 'bioconda', 'defaults'] + apptainer.enabled = false } mamba { - conda.enabled = true - conda.useMamba = true - docker.enabled = false - singularity.enabled = false - podman.enabled = false - shifter.enabled = false - charliecloud.enabled = false - apptainer.enabled = false + conda.enabled = true + conda.useMamba = true + docker.enabled = false + singularity.enabled = false + podman.enabled = false + shifter.enabled = false + charliecloud.enabled = false + apptainer.enabled = false } docker { - docker.enabled = true - conda.enabled = false - singularity.enabled = false - podman.enabled = false - shifter.enabled = false - charliecloud.enabled = false - apptainer.enabled = false - docker.runOptions = '-u $(id -u):$(id -g)' + docker.enabled = true + conda.enabled = false + singularity.enabled = false + 
podman.enabled = false + shifter.enabled = false + charliecloud.enabled = false + apptainer.enabled = false + docker.runOptions = '-u $(id -u):$(id -g)' } arm { - docker.runOptions = '-u $(id -u):$(id -g) --platform=linux/amd64' + docker.runOptions = '-u $(id -u):$(id -g) --platform=linux/amd64' } singularity { conda.enabled = false @@ -243,22 +242,22 @@ profiles { apptainer.enabled = false } podman { - podman.enabled = true - conda.enabled = false - docker.enabled = false - singularity.enabled = false - shifter.enabled = false - charliecloud.enabled = false - apptainer.enabled = false + podman.enabled = true + conda.enabled = false + docker.enabled = false + singularity.enabled = false + shifter.enabled = false + charliecloud.enabled = false + apptainer.enabled = false } shifter { - shifter.enabled = true - conda.enabled = false - docker.enabled = false - singularity.enabled = false - podman.enabled = false - charliecloud.enabled = false - apptainer.enabled = false + shifter.enabled = true + conda.enabled = false + docker.enabled = false + singularity.enabled = false + podman.enabled = false + charliecloud.enabled = false + apptainer.enabled = false } charliecloud { conda.enabled = false @@ -271,19 +270,26 @@ profiles { apptainer.enabled = false } apptainer { - apptainer.enabled = true - apptainer.autoMounts = true - conda.enabled = false - docker.enabled = false - singularity.enabled = false - podman.enabled = false - shifter.enabled = false - charliecloud.enabled = false + apptainer.enabled = true + apptainer.autoMounts = true + conda.enabled = false + docker.enabled = false + singularity.enabled = false + podman.enabled = false + shifter.enabled = false + charliecloud.enabled = false + } + wave { + apptainer.ociAutoPull = true + singularity.ociAutoPull = true + wave.enabled = true + wave.freeze = true + wave.strategy = 'conda,container' } gitpod { - executor.name = 'local' - executor.cpus = 4 - executor.memory = 8.GB + executor.name = 'local' + executor.cpus = 
4 + executor.memory = 8.GB } test { includeConfig 'conf/test.config' } test_full { includeConfig 'conf/test_full.config' } @@ -368,7 +374,7 @@ manifest { description = """B and T cell repertoire analysis pipeline with the Immcantation framework.""" mainScript = 'main.nf' nextflowVersion = '!>=23.04.0' - version = '4.0' + version = '4.1.0' doi = '10.5281/zenodo.2642009' } diff --git a/nextflow_schema.json b/nextflow_schema.json index cc33f8ff..1c32a276 100644 --- a/nextflow_schema.json +++ b/nextflow_schema.json @@ -742,6 +742,13 @@ "description": "Validation of parameters in lenient more.", "hidden": true, "help_text": "Allows string values that are parseable as numbers or booleans. For further information see [JSONSchema docs](https://github.com/everit-org/json-schema#lenient-mode)." + }, + "pipelines_testdata_base_path": { + "type": "string", + "fa_icon": "far fa-check-circle", + "description": "Base URL or local path to location of pipeline test dataset files", + "default": "https://raw.githubusercontent.com/nf-core/test-datasets/airrflow/", + "hidden": true } } } diff --git a/pyproject.toml b/pyproject.toml deleted file mode 100644 index 56110621..00000000 --- a/pyproject.toml +++ /dev/null @@ -1,15 +0,0 @@ -# Config file for Python. Mostly used to configure linting of bin/*.py with Ruff. -# Should be kept the same as nf-core/tools to avoid fighting with template synchronisation. 
-[tool.ruff] -line-length = 120 -target-version = "py38" -cache-dir = "~/.cache/ruff" - -[tool.ruff.lint] -select = ["I", "E1", "E4", "E7", "E9", "F", "UP", "N"] - -[tool.ruff.lint.isort] -known-first-party = ["nf_core"] - -[tool.ruff.lint.per-file-ignores] -"__init__.py" = ["E402", "F401"] diff --git a/subworkflows/local/presto_umi.nf b/subworkflows/local/presto_umi.nf index 89e8d5f6..17caa68e 100644 --- a/subworkflows/local/presto_umi.nf +++ b/subworkflows/local/presto_umi.nf @@ -7,28 +7,28 @@ include { FASTP } from '../../modules/n //PRESTO -include { PRESTO_FILTERSEQ as PRESTO_FILTERSEQ_UMI } from '../../modules/local/presto/presto_filterseq' -include { PRESTO_MASKPRIMERS as PRESTO_MASKPRIMERS_UMI } from '../../modules/local/presto/presto_maskprimers' -include { PRESTO_MASKPRIMERS_ALIGN } from '../../modules/local/presto/presto_maskprimers_align' -include { PRESTO_MASKPRIMERS_EXTRACT } from '../../modules/local/presto/presto_maskprimers_extract' -include { PRESTO_MASKPRIMERS_ALIGN as PRESTO_ALIGN_CREGION } from '../../modules/local/presto/presto_maskprimers_align' -include { PRESTO_PAIRSEQ as PRESTO_PAIRSEQ_UMI } from '../../modules/local/presto/presto_pairseq' -include { PRESTO_PAIRSEQ as PRESTO_PAIRSEQ_ALIGN } from '../../modules/local/presto/presto_pairseq' -include { PRESTO_CLUSTERSETS as PRESTO_CLUSTERSETS_UMI } from '../../modules/local/presto/presto_clustersets' -include { PRESTO_PARSE_CLUSTER as PRESTO_PARSE_CLUSTER_UMI } from '../../modules/local/presto/presto_parse_cluster' -include { PRESTO_BUILDCONSENSUS as PRESTO_BUILDCONSENSUS_UMI } from '../../modules/local/presto/presto_buildconsensus' -include { PRESTO_BUILDCONSENSUS as PRESTO_BUILDCONSENSUS_ALIGN } from '../../modules/local/presto/presto_buildconsensus' +include { PRESTO_FILTERSEQ as PRESTO_FILTERSEQ_UMI } from '../../modules/local/presto/presto_filterseq' +include { PRESTO_MASKPRIMERS as PRESTO_MASKPRIMERS_UMI } from '../../modules/local/presto/presto_maskprimers' +include { 
PRESTO_MASKPRIMERS_ALIGN as PRESTO_ALIGN_PRIMERS } from '../../modules/local/presto/presto_maskprimers_align' +include { PRESTO_MASKPRIMERS_EXTRACT } from '../../modules/local/presto/presto_maskprimers_extract' +include { PRESTO_MASKPRIMERS_ALIGN as PRESTO_ALIGN_CREGION } from '../../modules/local/presto/presto_maskprimers_align' +include { PRESTO_PAIRSEQ as PRESTO_PAIRSEQ_UMI } from '../../modules/local/presto/presto_pairseq' +include { PRESTO_PAIRSEQ as PRESTO_PAIRSEQ_ALIGN } from '../../modules/local/presto/presto_pairseq' +include { PRESTO_CLUSTERSETS as PRESTO_CLUSTERSETS_UMI } from '../../modules/local/presto/presto_clustersets' +include { PRESTO_PARSE_CLUSTER as PRESTO_PARSE_CLUSTER_UMI } from '../../modules/local/presto/presto_parse_cluster' +include { PRESTO_BUILDCONSENSUS as PRESTO_BUILDCONSENSUS_UMI} from '../../modules/local/presto/presto_buildconsensus' +include { PRESTO_BUILDCONSENSUS as PRESTO_BUILDCONSENSUS_ALIGN } from '../../modules/local/presto/presto_buildconsensus' include { PRESTO_POSTCONSENSUS_PAIRSEQ as PRESTO_POSTCONSENSUS_PAIRSEQ_UMI } from '../../modules/local/presto/presto_postconsensus_pairseq' -include { PRESTO_ASSEMBLEPAIRS as PRESTO_ASSEMBLEPAIRS_UMI } from '../../modules/local/presto/presto_assemblepairs' -include { PRESTO_ASSEMBLEPAIRS_SEQUENTIAL } from '../../modules/local/presto/presto_assemblepairs_sequential' +include { PRESTO_ASSEMBLEPAIRS as PRESTO_ASSEMBLEPAIRS_UMI } from '../../modules/local/presto/presto_assemblepairs' +include { PRESTO_ASSEMBLEPAIRS_SEQUENTIAL } from '../../modules/local/presto/presto_assemblepairs_sequential' include { PRESTO_PARSEHEADERS as PRESTO_PARSEHEADERS_COLLAPSE_UMI } from '../../modules/local/presto/presto_parseheaders' -include { PRESTO_PARSEHEADERS as PRESTO_PARSEHEADERS_CREGION } from '../../modules/local/presto/presto_parseheaders' +include { PRESTO_PARSEHEADERS as PRESTO_PARSEHEADERS_CREGION } from '../../modules/local/presto/presto_parseheaders' include { PRESTO_PARSEHEADERS_PRIMERS as 
PRESTO_PARSEHEADERS_PRIMERS_UMI } from '../../modules/local/presto/presto_parseheaders_primers' include { PRESTO_PARSEHEADERS_METADATA as PRESTO_PARSEHEADERS_METADATA_UMI } from '../../modules/local/presto/presto_parseheaders_metadata' -include { PRESTO_COLLAPSESEQ as PRESTO_COLLAPSESEQ_UMI } from '../../modules/local/presto/presto_collapseseq' -include { PRESTO_COLLAPSESEQ as PRESTO_COLLAPSESEQ_ALIGN } from '../../modules/local/presto/presto_collapseseq' -include { PRESTO_COLLAPSESEQ as PRESTO_COLLAPSESEQ_CREGION } from '../../modules/local/presto/presto_collapseseq' -include { PRESTO_SPLITSEQ as PRESTO_SPLITSEQ_UMI} from '../../modules/local/presto/presto_splitseq' +include { PRESTO_COLLAPSESEQ as PRESTO_COLLAPSESEQ_UMI } from '../../modules/local/presto/presto_collapseseq' +include { PRESTO_COLLAPSESEQ as PRESTO_COLLAPSESEQ_ALIGN } from '../../modules/local/presto/presto_collapseseq' +include { PRESTO_COLLAPSESEQ as PRESTO_COLLAPSESEQ_CREGION} from '../../modules/local/presto/presto_collapseseq' +include { PRESTO_SPLITSEQ as PRESTO_SPLITSEQ_UMI } from '../../modules/local/presto/presto_splitseq' workflow PRESTO_UMI { @@ -108,7 +108,7 @@ workflow PRESTO_UMI { .map{ reads -> [reads[0], reads[1]] }.dump(tag: 'ch_reads_R1') ch_reads_R2 = PRESTO_FILTERSEQ_UMI.out.reads .map{ reads -> [reads[0], reads[2]] }.dump(tag: 'ch_reads_R2') - PRESTO_MASKPRIMERS_ALIGN( + PRESTO_ALIGN_PRIMERS( ch_reads_R1, ch_cprimers.collect(), params.primer_maxlen, @@ -119,15 +119,15 @@ workflow PRESTO_UMI { ch_reads_R2 ) - ch_versions = ch_versions.mix(PRESTO_MASKPRIMERS_ALIGN.out.versions) + ch_versions = ch_versions.mix(PRESTO_ALIGN_PRIMERS.out.versions) ch_versions = ch_versions.mix(PRESTO_MASKPRIMERS_EXTRACT.out.versions) // Merge again R1 and R2 by sample ID. 
- ch_maskprimers_reads_R1 = PRESTO_MASKPRIMERS_ALIGN.out.reads.map{ reads -> [reads[0].id, reads[0], reads[1]]}.dump(tag: 'ch_maskprimers_reads_R1') + ch_maskprimers_reads_R1 = PRESTO_ALIGN_PRIMERS.out.reads.map{ reads -> [reads[0].id, reads[0], reads[1]]}.dump(tag: 'ch_maskprimers_reads_R1') ch_maskprimers_reads_R2 = PRESTO_MASKPRIMERS_EXTRACT.out.reads.map{ reads -> [reads[0].id, reads[0], reads[1]]}.dump(tag: 'ch_maskprimers_reads_R2') ch_maskprimers_reads = ch_maskprimers_reads_R1.join(ch_maskprimers_reads_R2) .map{ it -> [it[1], it[2], it[4]] }.dump(tag: 'ch_maskprimers_reads_after_remerge') - ch_maskprimers_logs = PRESTO_MASKPRIMERS_ALIGN.out.logs + ch_maskprimers_logs = PRESTO_ALIGN_PRIMERS.out.logs ch_maskprimers_logs = ch_maskprimers_logs.mix(PRESTO_MASKPRIMERS_EXTRACT.out.logs) PRESTO_PAIRSEQ_ALIGN( ch_maskprimers_reads ) diff --git a/subworkflows/local/utils_nfcore_airrflow_pipeline/main.nf b/subworkflows/local/utils_nfcore_airrflow_pipeline/main.nf index cbdd9668..ebbb9e90 100644 --- a/subworkflows/local/utils_nfcore_airrflow_pipeline/main.nf +++ b/subworkflows/local/utils_nfcore_airrflow_pipeline/main.nf @@ -122,6 +122,10 @@ workflow PIPELINE_COMPLETION { imNotification(summary_params, hook_url) } } + + workflow.onError { + log.error "Pipeline failed. Please refer to troubleshooting docs: https://nf-co.re/docs/usage/troubleshooting" + } } /* @@ -230,8 +234,16 @@ def methodsDescriptionText(mqc_methods_yaml) { meta["manifest_map"] = workflow.manifest.toMap() // Pipeline DOI - meta["doi_text"] = meta.manifest_map.doi ? "(doi: ${meta.manifest_map.doi})" : "" - meta["nodoi_text"] = meta.manifest_map.doi ? "": "
  • If available, make sure to update the text to include the Zenodo DOI of version of the pipeline used.
  • " + if (meta.manifest_map.doi) { + // Using a loop to handle multiple DOIs + // Removing `https://doi.org/` to handle pipelines using DOIs vs DOI resolvers + // Removing ` ` since the manifest.doi is a string and not a proper list + def temp_doi_ref = "" + String[] manifest_doi = meta.manifest_map.doi.tokenize(",") + for (String doi_ref: manifest_doi) temp_doi_ref += "(doi: ${doi_ref.replace("https://doi.org/", "").replace(" ", "")}), " + meta["doi_text"] = temp_doi_ref.substring(0, temp_doi_ref.length() - 2) + } else meta["doi_text"] = "" + meta["nodoi_text"] = meta.manifest_map.doi ? "" : "
  • If available, make sure to update the text to include the Zenodo DOI of version of the pipeline used.
  • " // Tool references meta["tool_citations"] = "" diff --git a/subworkflows/nf-core/utils_nfcore_pipeline/main.nf b/subworkflows/nf-core/utils_nfcore_pipeline/main.nf index a8b55d6f..14558c39 100644 --- a/subworkflows/nf-core/utils_nfcore_pipeline/main.nf +++ b/subworkflows/nf-core/utils_nfcore_pipeline/main.nf @@ -65,9 +65,15 @@ def checkProfileProvided(nextflow_cli_args) { // Citation string for pipeline // def workflowCitation() { + def temp_doi_ref = "" + String[] manifest_doi = workflow.manifest.doi.tokenize(",") + // Using a loop to handle multiple DOIs + // Removing `https://doi.org/` to handle pipelines using DOIs vs DOI resolvers + // Removing ` ` since the manifest.doi is a string and not a proper list + for (String doi_ref: manifest_doi) temp_doi_ref += " https://doi.org/${doi_ref.replace('https://doi.org/', '').replace(' ', '')}\n" return "If you use ${workflow.manifest.name} for your analysis please cite:\n\n" + "* The pipeline\n" + - " ${workflow.manifest.doi}\n\n" + + temp_doi_ref + "\n" + "* The nf-core framework\n" + " https://doi.org/10.1038/s41587-020-0439-x\n\n" + "* Software dependencies\n" + diff --git a/workflows/airrflow.nf b/workflows/airrflow.nf index bc6b7924..4d228544 100644 --- a/workflows/airrflow.nf +++ b/workflows/airrflow.nf @@ -248,12 +248,16 @@ workflow AIRRFLOW { ch_versions = ch_versions.mix( REPERTOIRE_ANALYSIS_REPORTING.out.versions ) ch_versions.dump(tag: "channel_versions") - // - // Collate and save software versions - // - softwareVersionsToYAML(ch_versions) - .collectFile(storeDir: "${params.outdir}/pipeline_info", name: 'nf_core_pipeline_software_mqc_versions.yml', sort: true, newLine: true) - .set { ch_collated_versions } + // + // Collate and save software versions + // + softwareVersionsToYAML(ch_versions) + .collectFile( + storeDir: "${params.outdir}/pipeline_info", + name: 'nf_core_pipeline_software_mqc_versions.yml', + sort: true, + newLine: true + ).set { ch_collated_versions } // MODULE: MultiQC @@ -280,9 
+284,11 @@ workflow AIRRFLOW { ch_report_logo.toList() ) multiqc_report = MULTIQC.out.report.toList() + } else { + multiqc_report = Channel.empty() } emit: - multiqc_report = MULTIQC.out.report.toList() // channel: /path/to/multiqc_report.html + multiqc_report = multiqc_report // channel: /path/to/multiqc_report.html versions = ch_versions // channel: [ path(versions.yml) ] }