diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS
index 1fdc8e3066..903e7aa141 100644
--- a/.github/CODEOWNERS
+++ b/.github/CODEOWNERS
@@ -1,2 +1,3 @@
-* @FriederikeHanssen
-* @maxulysse
+* @FriederikeHanssen @maxulysse
+*.nf.test* @nf-core/nf-test
+.github/workflows/ @nf-core/a-team
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 361fca04fe..12ed40b678 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -11,34 +11,40 @@ on:
      - master
      - dev
+env:
+  NXF_ANSI_LOG: false
+  NFTEST_VER: "0.8.1"
+
# Cancel if a newer run is started
concurrency:
  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
  cancel-in-progress: true
jobs:
-  changes:
-    name: Check for changes
+  pytest-changes:
+    name: Check for changes (pytest)
    runs-on: ubuntu-latest
    outputs:
      # Expose matched filters as job 'tags' output variable
      tags: ${{ steps.filter.outputs.changes }}
    steps:
      - uses: actions/checkout@v3
-      - uses: dorny/paths-filter@v2
+
+      - uses: frouioui/paths-filter@main
        id: filter
        with:
-          filters: "tests/config/tags.yml"
+          filters: "tests/config/pytesttags.yml"
+          token: ""
-  test:
+  pytest:
    name: ${{ matrix.tags }} ${{ matrix.profile }} NF ${{ matrix.NXF_VER }}
    runs-on: ubuntu-latest
-    needs: changes
-    if: needs.changes.outputs.tags != '[]'
+    needs: pytest-changes
+    if: needs.pytest-changes.outputs.tags != '[]'
    strategy:
      fail-fast: false
      matrix:
-        tags: ["${{ fromJson(needs.changes.outputs.tags) }}"]
+        tags: ["${{ fromJson(needs.pytest-changes.outputs.tags) }}"]
        profile: ["docker"]
        # profile: ["docker", "singularity", "conda"]
        TEST_DATA_BASE:
@@ -50,6 +56,7 @@ jobs:
      NXF_ANSI_LOG: false
      TEST_DATA_BASE: "${{ github.workspace }}/test-datasets"
      SENTIEON_LICENSE_BASE64: ${{ secrets.SENTIEON_LICENSE_BASE64 }}
+
    steps:
      - name: Check out pipeline code
        uses: actions/checkout@v3
@@ -154,10 +161,136 @@ jobs:
            !/home/runner/pytest_workflow_*/*/work/conda
            !/home/runner/pytest_workflow_*/*/work/singularity
+  nftest-changes:
+    name: Check for changes (nf-test)
+    runs-on: ubuntu-latest
+    outputs:
+      tags: ${{ steps.filter.outputs.changes }}
+
+    steps:
+      - uses: actions/checkout@v3
+
+      - name: Combine all tags.yml files
+        id: get_tags
+        run: find . -name "tags.yml" -not -path "./.github/*" -exec cat {} + > .github/tags.yml
+
+      - name: debug
+        run: cat .github/tags.yml
+
+      - uses: frouioui/paths-filter@main
+        id: filter
+        with:
+          filters: ".github/tags.yml"
+          token: ""
+
+  nftest:
+    name: ${{ matrix.tags }} ${{ matrix.profile }} NF ${{ matrix.NXF_VER }}
+    runs-on: ubuntu-latest
+    needs: nftest-changes
+    if: needs.nftest-changes.outputs.tags != '[]'
+    strategy:
+      fail-fast: false
+      matrix:
+        tags: ["${{ fromJson(needs.nftest-changes.outputs.tags) }}"]
+        profile: ["docker"]
+        # profile: ["docker", "singularity", "conda"]
+        TEST_DATA_BASE:
+          - "test-datasets/data"
+        NXF_VER:
+          - "23.04.0"
+          - "latest-everything"
+        exclude:
+          - tags: "bwa/index"
+          - tags: "bwa/mem"
+          - tags: "cat/cat"
+          - tags: "cat/fastq"
+          - tags: "custom/dumpsoftwareversions"
+          - tags: "fastp"
+          - tags: "fastqc"
+          - tags: "samtools/stats"
+          - tags: "untar"
+    env:
+      NXF_ANSI_LOG: false
+      TEST_DATA_BASE: "${{ github.workspace }}/test-datasets"
+      SENTIEON_LICENSE_BASE64: ${{ secrets.SENTIEON_LICENSE_BASE64 }}
+
+    steps:
+      - uses: actions/checkout@v3
+
+      - uses: actions/setup-java@v3
+        with:
+          distribution: "temurin"
+          java-version: "17"
+      - name: Setup Nextflow
+        uses: nf-core/setup-nextflow@v1
+
+      - name: Cache nf-test installation
+        id: cache-software
+        uses: actions/cache@v3
+        with:
+          path: |
+            /usr/local/bin/nf-test
+            /home/runner/.nf-test/nf-test.jar
+          key: ${{ runner.os }}-${{ env.NFTEST_VER }}-nftest
+
+      - name: Install nf-test
+        if: steps.cache-software.outputs.cache-hit != 'true'
+        run: |
+          wget -qO- https://code.askimed.com/install/nf-test | bash
+          sudo mv nf-test /usr/local/bin/
+
+      - name: Setup apptainer
+        if: matrix.profile == 'singularity'
+        uses: eWaterCycle/setup-apptainer@main
+
+      - name: Set up Singularity
+        if: matrix.profile == 'singularity'
+        run: |
+          mkdir -p $NXF_SINGULARITY_CACHEDIR
+          mkdir -p $NXF_SINGULARITY_LIBRARYDIR
+
+      - name: Set up miniconda
+        uses: conda-incubator/setup-miniconda@v2
+        with:
+          miniconda-version: "latest"
+          auto-update-conda: true
+          channels: conda-forge,bioconda,defaults
+          python-version: ${{ matrix.python-version }}
+
+      - name: Conda setup
+        run: |
+          conda clean -a
+          conda install -n base conda-libmamba-solver
+          conda config --set solver libmamba
+          echo $(realpath $CONDA)/condabin >> $GITHUB_PATH
+          echo $(realpath python) >> $GITHUB_PATH
+
+      # Set up secrets
+      - name: Set up nextflow secrets
+        if: env.SENTIEON_LICENSE_BASE64 != null
+        run: |
+          nextflow secrets set SENTIEON_LICENSE_BASE64 ${{ secrets.SENTIEON_LICENSE_BASE64 }}
+          nextflow secrets set SENTIEON_AUTH_MECH_BASE64 ${{ secrets.SENTIEON_AUTH_MECH_BASE64 }}
+          SENTIEON_ENCRYPTION_KEY=$(echo -n "${{ secrets.ENCRYPTION_KEY_BASE64 }}" | base64 -d)
+          SENTIEON_LICENSE_MESSAGE=$(echo -n "${{ secrets.LICENSE_MESSAGE_BASE64 }}" | base64 -d)
+          SENTIEON_AUTH_DATA=$(python3 bin/license_message.py encrypt --key "$SENTIEON_ENCRYPTION_KEY" --message "$SENTIEON_LICENSE_MESSAGE")
+          SENTIEON_AUTH_DATA_BASE64=$(echo -n "$SENTIEON_AUTH_DATA" | base64 -w 0)
+          nextflow secrets set SENTIEON_AUTH_DATA_BASE64 $SENTIEON_AUTH_DATA_BASE64
+
+      # Test the module
+      - name: Run nf-test
+        run: |
+          nf-test test \
+            --profile=${{ matrix.profile }} \
+            --tag ${{ matrix.tags }} \
+            --tap=test.tap \
+            --verbose
+
  confirm-pass:
    runs-on: ubuntu-latest
    needs:
-      - test
+      - pytest
+      - nftest
    if: always()
    steps:
      - name: All tests ok
diff --git a/.gitignore b/.gitignore
index e90c12ab54..f3bc0d64f3 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,10 +1,12 @@
*.code-workspace
+*.pyc
.nextflow*
-work/
-data/
-results/
.DS_Store
-testing/
+.nf-test*
+.nf-test/
+data/
testing*
-*.pyc
+testing/
test-datasets/
+results/
+work/
diff --git a/.nf-core.yml b/.nf-core.yml
index ee325600a3..36189e1fcb 100644
--- a/.nf-core.yml
+++ b/.nf-core.yml
@@ -5,6 +5,7 @@ lint:
    - .github/workflows/awsfulltest.yml
    - conf/modules.config
  files_unchanged:
+    - .gitignore
    - assets/nf-core-sarek_logo_light.png
    - docs/images/nf-core-sarek_logo_dark.png
    - docs/images/nf-core-sarek_logo_light.png
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 07f67f2258..a2918e702c 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -5,25 +5,96 @@ All notable changes to this project will be documented in this file.
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/)
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
-## dev
+## [dev]
### Added
+- [#1333](https://github.com/nf-core/sarek/pull/1333) - Back to dev
+- [#1335](https://github.com/nf-core/sarek/pull/1335) - Add index computation of `bcftools_annotations`, if not provided
+
+### Changed
+
+### Fixed
+
+- [#1334](https://github.com/nf-core/sarek/pull/1334) - Remove extra v, when reporting tower runs on slack
+- [#1335](https://github.com/nf-core/sarek/pull/1335) - Add docs and validation for bcftools annotation parameters
+
+### Removed
+
+### Dependencies
+
+| Dependency | Old version | New version |
+| ---------- | ----------- | ----------- |
+
+### Modules / Subworkflows
+
+| script | Old name | New name |
+| ------ | -------- | -------- |
+
+### Parameter
+
+| Old name                   | New name                 |
+| -------------------------- | ------------------------ |
+| bcftools_annotations_index | bcftools_annotations_tbi |
+
+## [3.4.0](https://github.com/nf-core/sarek/releases/tag/3.4.0) - Pårtetjåkko
+
+Pårtetjåkko is a mountain in the south of the park.
+
+### Added
+
+- [#1113](https://github.com/nf-core/sarek/pull/1113) - Adding CNVkit genemetrics module
- [#1193](https://github.com/nf-core/sarek/pull/1193) - Adding support for Sentieon's DnaScope for germline variant-calling including joint-germline
+- [#1244](https://github.com/nf-core/sarek/pull/1244) - Add bcf annotate module
+- [#1252](https://github.com/nf-core/sarek/pull/1252) - Added NGSCheckMate tool for checking that samples come from the same individual
- [#1271](https://github.com/nf-core/sarek/pull/1271) - Back to dev
+- [#1288](https://github.com/nf-core/sarek/pull/1288) - Add nf-test continuous integration (but no tests)
+- [#1290](https://github.com/nf-core/sarek/pull/1290) - Add nf-test for whole pipeline
### Changed
+- [#1278](https://github.com/nf-core/sarek/pull/1278) - Hide sentieon parameters similar to other variant callers
- [#1280](https://github.com/nf-core/sarek/pull/1280) - Replacing link to `SentieonDNAscopeModel1.1.model` in Sentieon's S3 with link to same file in igenomes' S3
+- [#1303](https://github.com/nf-core/sarek/pull/1303) - Resurrect vep_version params and changed its scope to pipeline to enable usage for vep loftee plugin
+- [#1304](https://github.com/nf-core/sarek/pull/1304) - Update modules
+- [#1311](https://github.com/nf-core/sarek/pull/1311) - Update local modules with an `environment.yml` file
+- [#1317](https://github.com/nf-core/sarek/pull/1317) - Add new tools to subway map
+- [#1325](https://github.com/nf-core/sarek/pull/1325) - Move `sentieon_dnascope_model` params into `igenomes.config`
+- [#1325](https://github.com/nf-core/sarek/pull/1325) - Refactor config files
+- [#1327](https://github.com/nf-core/sarek/pull/1327) - Update modules to have a conda environment name
### Fixed
- [#1277](https://github.com/nf-core/sarek/pull/1277) - Fix null value issue for Mutect2 joint calling
+- [#1287](https://github.com/nf-core/sarek/pull/1287) - Adding label `process_single` to local modules
+- [#1298](https://github.com/nf-core/sarek/pull/1298) - Fix annotation cache usage
+- [#1301](https://github.com/nf-core/sarek/pull/1301) - Fix nf-prov usage
+- [#1315](https://github.com/nf-core/sarek/pull/1315) - Avoid clash of configs of `FILTERVARIANTTRANCHES` in the Sentieon-Haplotyper and GATK-Haplotypecaller subworkflows
+- [#1318](https://github.com/nf-core/sarek/pull/1218) - Fix writing of params.json on S3
+- [#1324](https://github.com/nf-core/sarek/pull/1324) - Fix various typos & code formatting
+- [#1325](https://github.com/nf-core/sarek/pull/1325) - Update bcfannotate tests and related config files
+- [#1328](https://github.com/nf-core/sarek/pull/1328) - Fix links to docs in `nextflow_schema.json` and `docs/output.md`
+- [#1328](https://github.com/nf-core/sarek/pull/1328) - Add missing icons in `nextflow_schema.json`
+- [#1330](https://github.com/nf-core/sarek/pull/1330) - Add SnpEff to full sized tests
+
+### Removed
+
+- [#1298](https://github.com/nf-core/sarek/pull/1298) - Remove `--use_annotation_cache_keys` params
### Dependencies
| Dependency | Old version | New version |
| ---------- | ----------- | ----------- |
+| fastqc     | 0.11.9      | 0.12.1      |
+| multiqc    | 1.15        | 1.17        |
+
+### Modules / Subworkflows
+
+| script                        | Old name                      | New name                      |
+| ----------------------------- | ----------------------------- | ----------------------------- |
+| `gatk4spark/applybqsr`        | `GATK4_APPLYBQSRSPARK`        | `GATK4SPARK_APPLYBQSR`        |
+| `gatk4spark/baserecalibrator` | `GATK4_BASERECALIBRATORSPARK` | `GATK4SPARK_BASERECALIBRATOR` |
+| `gatk4spark/markduplicates`   | `GATK4_MARKDUPLICATESSPARK`   | `GATK4SPARK_MARKDUPLICATES`   |
## [3.3.2](https://github.com/nf-core/sarek/releases/tag/3.3.2) - Ráhpajávvre
@@ -33,6 +104,7 @@ Ráhpajávvre is the Lule Sámi spelling of Rapaselet.
- [#1246](https://github.com/nf-core/sarek/pull/1246) - Back to dev
- [#1259](https://github.com/nf-core/sarek/pull/1259) - nf-prov plugin
+- [#1288](https://github.com/nf-core/sarek/pull/1288) - Add nf-test continuous integration.
### Changed
diff --git a/README.md b/README.md
index 8c1ef5d130..c128e04955 100644
--- a/README.md
+++ b/README.md
@@ -50,7 +50,7 @@ Depending on the options and samples provided, the pipeline can currently perform
  - `Sentieon Haplotyper`
  - `Strelka2`
  - `TIDDIT`
-- Variant filtering and annotation (`SnpEff`, `Ensembl VEP`)
+- Variant filtering and annotation (`SnpEff`, `Ensembl VEP`, `BCFtools annotate`)
- Summarise and represent QC (`MultiQC`)
@@ -131,6 +131,7 @@ We thank the following people for their extensive assistance in the development
- [Francesco Lescai](https://github.com/lescai)
- [Gavin Mackenzie](https://github.com/GCJMackenzie)
- [Gisela Gabernet](https://github.com/ggabernet)
+- [Grant Neilson](https://github.com/grantn5)
- [gulfshores](https://github.com/gulfshores)
- [Harshil Patel](https://github.com/drpatelh)
- [James A. Fellows Yates](https://github.com/jfy133)
diff --git a/assets/slackreport.json b/assets/slackreport.json
index de941028ea..6d735f5f6c 100644
--- a/assets/slackreport.json
+++ b/assets/slackreport.json
@@ -3,7 +3,7 @@
{
"fallback": "Plain-text summary of the attachment.",
"color": "<% if (success) { %>good<% } else { %>danger<%} %>",
- "author_name": "nf-core/sarek v${version} - ${runName}",
+ "author_name": "nf-core/sarek ${version} - ${runName}",
"author_icon": "https://www.nextflow.io/docs/latest/_static/favicon.ico",
"text": "<% if (success) { %>Pipeline completed successfully!<% } else { %>Pipeline completed with errors<% } %>",
"fields": [
diff --git a/conf/base.config b/conf/base.config
index db1175874f..d371e9407f 100644
--- a/conf/base.config
+++ b/conf/base.config
@@ -21,80 +21,84 @@ process {
// Process-specific resource requirements
// See https://www.nextflow.io/docs/latest/config.html#config-process-selectors
+ withLabel:error_ignore {
+ errorStrategy = 'ignore'
+ }
+ withLabel:error_retry {
+ errorStrategy = 'retry'
+ maxRetries = 2
+ }
withLabel:process_single {
- cpus = { check_max( 1 , 'cpus' ) }
- memory = { check_max( 6.GB * task.attempt, 'memory' ) }
- time = { check_max( 4.h * task.attempt, 'time' ) }
+ cpus = { check_max( 1 , 'cpus' ) }
+ memory = { check_max( 6.GB * task.attempt, 'memory' ) }
+ time = { check_max( 4.h * task.attempt, 'time' ) }
}
withLabel:process_low {
- cpus = { check_max( 2 * task.attempt, 'cpus' ) }
- memory = { check_max( 12.GB * task.attempt, 'memory' ) }
- time = { check_max( 4.h * task.attempt, 'time' ) }
+ cpus = { check_max( 2 * task.attempt, 'cpus' ) }
+ memory = { check_max( 12.GB * task.attempt, 'memory' ) }
+ time = { check_max( 4.h * task.attempt, 'time' ) }
}
withLabel:process_medium {
- cpus = { check_max( 6 * task.attempt, 'cpus' ) }
- memory = { check_max( 36.GB * task.attempt, 'memory' ) }
- time = { check_max( 8.h * task.attempt, 'time' ) }
+ cpus = { check_max( 6 * task.attempt, 'cpus' ) }
+ memory = { check_max( 36.GB * task.attempt, 'memory' ) }
+ time = { check_max( 8.h * task.attempt, 'time' ) }
}
withLabel:process_high {
- cpus = { check_max( 12 * task.attempt, 'cpus' ) }
- memory = { check_max( 72.GB * task.attempt, 'memory' ) }
- time = { check_max( 16.h * task.attempt, 'time' ) }
+ cpus = { check_max( 12 * task.attempt, 'cpus' ) }
+ memory = { check_max( 72.GB * task.attempt, 'memory' ) }
+ time = { check_max( 16.h * task.attempt, 'time' ) }
}
withLabel:process_long {
- time = { check_max( 20.h * task.attempt, 'time' ) }
+ time = { check_max( 20.h * task.attempt, 'time' ) }
}
withLabel:process_high_memory {
- memory = { check_max( 200.GB * task.attempt, 'memory' ) }
- }
- withLabel:error_ignore {
- errorStrategy = 'ignore'
- }
- withLabel:error_retry {
- errorStrategy = 'retry'
- maxRetries = 2
+ memory = { check_max( 200.GB * task.attempt, 'memory' ) }
}
withName: 'UNZIP.*|UNTAR.*|TABIX.*|BUILD_INTERVALS|CREATE_INTERVALS_BED|CUSTOM_DUMPSOFTWAREVERSIONS|VCFTOOLS|BCFTOOLS.*|SAMTOOLS_INDEX' {
- cpus = { check_max( 1 * task.attempt, 'cpus' ) }
- memory = { check_max( 1.GB * task.attempt, 'memory' ) }
+ cpus = { check_max( 1 * task.attempt, 'cpus' ) }
+ memory = { check_max( 1.GB * task.attempt, 'memory' ) }
}
withName: 'FASTQC'{
- cpus = { check_max( 4 * task.attempt, 'cpus' ) }
- memory = { check_max( 4.GB * task.attempt, 'memory' ) }
+ cpus = { check_max( 4 * task.attempt, 'cpus' ) }
+ memory = { check_max( 4.GB * task.attempt, 'memory' ) }
}
withName: 'FASTP'{
- cpus = { check_max( 12 * task.attempt, 'cpus' ) }
- memory = { check_max( 4.GB * task.attempt, 'memory' ) }
+ cpus = { check_max( 12 * task.attempt, 'cpus' ) }
+ memory = { check_max( 4.GB * task.attempt, 'memory' ) }
}
withName: 'BWAMEM1_MEM|BWAMEM2_MEM' {
- cpus = { check_max( 24 * task.attempt, 'cpus' ) }
- memory = { check_max( 30.GB * task.attempt, 'memory' ) }
+ cpus = { check_max( 24 * task.attempt, 'cpus' ) }
+ memory = { check_max( 30.GB * task.attempt, 'memory' ) }
+ }
+ withName:'CNVKIT_BATCH' {
+ label = "process_high"
+ memory = { check_max( 36.GB * task.attempt, 'memory' ) }
}
- withName: 'GATK4_MARKDUPLICATES|GATK4_MARKDUPLICATESSPARK' {
- cpus = { check_max( 6 * task.attempt, 'cpus' ) }
- memory = { check_max( 30.GB * task.attempt, 'memory' ) }
+ withName: 'GATK4_MARKDUPLICATES|GATK4SPARK_MARKDUPLICATES' {
+ cpus = { check_max( 6 * task.attempt, 'cpus' ) }
+ memory = { check_max( 30.GB * task.attempt, 'memory' ) }
}
- withName:'GATK4_APPLYBQSR|GATK4_APPLYBQSR_SPARK|GATK4_BASERECALIBRATOR|GATK4_BASERECALIBRATOR_SPARK|GATK4_GATHERBQSRREPORTS'{
- cpus = { check_max( 2 * task.attempt, 'cpus' ) }
- memory = { check_max( 4.GB * task.attempt, 'memory' ) }
+ withName:'GATK4_APPLYBQSR|GATK4SPARK_APPLYBQSR|GATK4_BASERECALIBRATOR|GATK4SPARK_BASERECALIBRATOR|GATK4_GATHERBQSRREPORTS'{
+ cpus = { check_max( 2 * task.attempt, 'cpus' ) }
+ memory = { check_max( 4.GB * task.attempt, 'memory' ) }
}
withName:'MOSDEPTH'{
- cpus = { check_max( 4 * task.attempt, 'cpus' ) }
- memory = { check_max( 4.GB * task.attempt, 'memory' ) }
+ cpus = { check_max( 4 * task.attempt, 'cpus' ) }
+ memory = { check_max( 4.GB * task.attempt, 'memory' ) }
}
withName:'STRELKA.*|MANTA.*' {
- cpus = { check_max( 10 * task.attempt, 'cpus' ) }
- memory = { check_max( 8.GB * task.attempt, 'memory' ) }
+ cpus = { check_max( 10 * task.attempt, 'cpus' ) }
+ memory = { check_max( 8.GB * task.attempt, 'memory' ) }
}
withName:'SAMTOOLS_CONVERT'{
- memory = { check_max( 4.GB * task.attempt, 'memory' ) }
+ memory = { check_max( 4.GB * task.attempt, 'memory' ) }
}
withName:'GATK4_MERGEVCFS'{
- cpus = { check_max( 2 * task.attempt, 'cpus' ) }
- memory = { check_max( 4.GB * task.attempt, 'memory' ) }
+ cpus = { check_max( 2 * task.attempt, 'cpus' ) }
+ memory = { check_max( 4.GB * task.attempt, 'memory' ) }
}
withName: 'MULTIQC' {
- cpus = { check_max( 4 * task.attempt, 'cpus' ) }
- memory = { check_max( 12.GB * task.attempt, 'memory' ) }
+ cpus = { check_max( 4 * task.attempt, 'cpus' ) }
+ memory = { check_max( 12.GB * task.attempt, 'memory' ) }
}
}
diff --git a/conf/igenomes.config b/conf/igenomes.config
index 0e68dfee19..1ae02673d7 100644
--- a/conf/igenomes.config
+++ b/conf/igenomes.config
@@ -35,6 +35,7 @@ params {
known_indels_tbi = "${params.igenomes_base}/Homo_sapiens/GATK/GRCh37/Annotation/GATKBundle/{1000G_phase1,Mills_and_1000G_gold_standard}.indels.b37.vcf.gz.tbi"
known_indels_vqsr = '--resource:1000G,known=false,training=true,truth=true,prior=10.0 1000G_phase1.indels.b37.vcf.gz --resource:mills,known=false,training=true,truth=true,prior=10.0 Mills_and_1000G_gold_standard.indels.b37.vcf.gz'
mappability = "${params.igenomes_base}/Homo_sapiens/GATK/GRCh37/Annotation/Control-FREEC/out100m2_hg19.gem"
+ ngscheckmate_bed = "${params.igenomes_base}/Homo_sapiens/GATK/GRCh37/Annotation/NGSCheckMate/SNP_GRCh37_hg19_wChr.bed"
snpeff_db = 87
snpeff_genome = 'GRCh37'
vep_cache_version = 110
@@ -42,43 +43,46 @@ params {
vep_species = 'homo_sapiens'
}
'GATK.GRCh38' {
- ascat_alleles = "${params.igenomes_base}/Homo_sapiens/GATK/GRCh38/Annotation/ASCAT/G1000_alleles_hg38.zip"
- ascat_genome = 'hg38'
- ascat_loci = "${params.igenomes_base}/Homo_sapiens/GATK/GRCh38/Annotation/ASCAT/G1000_loci_hg38.zip"
- ascat_loci_gc = "${params.igenomes_base}/Homo_sapiens/GATK/GRCh38/Annotation/ASCAT/GC_G1000_hg38.zip"
- ascat_loci_rt = "${params.igenomes_base}/Homo_sapiens/GATK/GRCh38/Annotation/ASCAT/RT_G1000_hg38.zip"
- bwa = "${params.igenomes_base}/Homo_sapiens/GATK/GRCh38/Sequence/BWAIndex/"
- bwamem2 = "${params.igenomes_base}/Homo_sapiens/GATK/GRCh38/Sequence/BWAmem2Index/"
- dragmap = "${params.igenomes_base}/Homo_sapiens/GATK/GRCh38/Sequence/dragmap/"
- chr_dir = "${params.igenomes_base}/Homo_sapiens/GATK/GRCh38/Sequence/Chromosomes"
- cf_chrom_len = "${params.igenomes_base}/Homo_sapiens/GATK/GRCh38/Sequence/Length/Homo_sapiens_assembly38.len"
- dbsnp = "${params.igenomes_base}/Homo_sapiens/GATK/GRCh38/Annotation/GATKBundle/dbsnp_146.hg38.vcf.gz"
- dbsnp_tbi = "${params.igenomes_base}/Homo_sapiens/GATK/GRCh38/Annotation/GATKBundle/dbsnp_146.hg38.vcf.gz.tbi"
- dbsnp_vqsr = '--resource:dbsnp,known=false,training=true,truth=false,prior=2.0 dbsnp_146.hg38.vcf.gz'
- dict = "${params.igenomes_base}/Homo_sapiens/GATK/GRCh38/Sequence/WholeGenomeFasta/Homo_sapiens_assembly38.dict"
- fasta = "${params.igenomes_base}/Homo_sapiens/GATK/GRCh38/Sequence/WholeGenomeFasta/Homo_sapiens_assembly38.fasta"
- fasta_fai = "${params.igenomes_base}/Homo_sapiens/GATK/GRCh38/Sequence/WholeGenomeFasta/Homo_sapiens_assembly38.fasta.fai"
- germline_resource = "${params.igenomes_base}/Homo_sapiens/GATK/GRCh38/Annotation/GATKBundle/af-only-gnomad.hg38.vcf.gz"
- germline_resource_tbi = "${params.igenomes_base}/Homo_sapiens/GATK/GRCh38/Annotation/GATKBundle/af-only-gnomad.hg38.vcf.gz.tbi"
- intervals = "${params.igenomes_base}/Homo_sapiens/GATK/GRCh38/Annotation/intervals/wgs_calling_regions_noseconds.hg38.bed"
- known_snps = "${params.igenomes_base}/Homo_sapiens/GATK/GRCh38/Annotation/GATKBundle/1000G_omni2.5.hg38.vcf.gz"
- known_snps_tbi = "${params.igenomes_base}/Homo_sapiens/GATK/GRCh38/Annotation/GATKBundle/1000G_omni2.5.hg38.vcf.gz.tbi"
- known_snps_vqsr = '--resource:1000G,known=false,training=true,truth=true,prior=10.0 1000G_omni2.5.hg38.vcf.gz'
- known_indels = "${params.igenomes_base}/Homo_sapiens/GATK/GRCh38/Annotation/GATKBundle/{Mills_and_1000G_gold_standard.indels.hg38,beta/Homo_sapiens_assembly38.known_indels}.vcf.gz"
- known_indels_tbi = "${params.igenomes_base}/Homo_sapiens/GATK/GRCh38/Annotation/GATKBundle/{Mills_and_1000G_gold_standard.indels.hg38,beta/Homo_sapiens_assembly38.known_indels}.vcf.gz.tbi"
- known_indels_vqsr = '--resource:gatk,known=false,training=true,truth=true,prior=10.0 Homo_sapiens_assembly38.known_indels.vcf.gz --resource:mills,known=false,training=true,truth=true,prior=10.0 Mills_and_1000G_gold_standard.indels.hg38.vcf.gz'
- mappability = "${params.igenomes_base}/Homo_sapiens/GATK/GRCh38/Annotation/Control-FREEC/out100m2_hg38.gem"
- pon = "${params.igenomes_base}/Homo_sapiens/GATK/GRCh38/Annotation/GATKBundle/1000g_pon.hg38.vcf.gz"
- pon_tbi = "${params.igenomes_base}/Homo_sapiens/GATK/GRCh38/Annotation/GATKBundle/1000g_pon.hg38.vcf.gz.tbi"
- snpeff_db = 105
- snpeff_genome = 'GRCh38'
- vep_cache_version = 110
- vep_genome = 'GRCh38'
- vep_species = 'homo_sapiens'
+ ascat_alleles = "${params.igenomes_base}/Homo_sapiens/GATK/GRCh38/Annotation/ASCAT/G1000_alleles_hg38.zip"
+ ascat_genome = 'hg38'
+ ascat_loci = "${params.igenomes_base}/Homo_sapiens/GATK/GRCh38/Annotation/ASCAT/G1000_loci_hg38.zip"
+ ascat_loci_gc = "${params.igenomes_base}/Homo_sapiens/GATK/GRCh38/Annotation/ASCAT/GC_G1000_hg38.zip"
+ ascat_loci_rt = "${params.igenomes_base}/Homo_sapiens/GATK/GRCh38/Annotation/ASCAT/RT_G1000_hg38.zip"
+ bwa = "${params.igenomes_base}/Homo_sapiens/GATK/GRCh38/Sequence/BWAIndex/"
+ bwamem2 = "${params.igenomes_base}/Homo_sapiens/GATK/GRCh38/Sequence/BWAmem2Index/"
+ cf_chrom_len = "${params.igenomes_base}/Homo_sapiens/GATK/GRCh38/Sequence/Length/Homo_sapiens_assembly38.len"
+ chr_dir = "${params.igenomes_base}/Homo_sapiens/GATK/GRCh38/Sequence/Chromosomes"
+ dbsnp = "${params.igenomes_base}/Homo_sapiens/GATK/GRCh38/Annotation/GATKBundle/dbsnp_146.hg38.vcf.gz"
+ dbsnp_tbi = "${params.igenomes_base}/Homo_sapiens/GATK/GRCh38/Annotation/GATKBundle/dbsnp_146.hg38.vcf.gz.tbi"
+ dbsnp_vqsr = '--resource:dbsnp,known=false,training=true,truth=false,prior=2.0 dbsnp_146.hg38.vcf.gz'
+ dict = "${params.igenomes_base}/Homo_sapiens/GATK/GRCh38/Sequence/WholeGenomeFasta/Homo_sapiens_assembly38.dict"
+ dragmap = "${params.igenomes_base}/Homo_sapiens/GATK/GRCh38/Sequence/dragmap/"
+ fasta = "${params.igenomes_base}/Homo_sapiens/GATK/GRCh38/Sequence/WholeGenomeFasta/Homo_sapiens_assembly38.fasta"
+ fasta_fai = "${params.igenomes_base}/Homo_sapiens/GATK/GRCh38/Sequence/WholeGenomeFasta/Homo_sapiens_assembly38.fasta.fai"
+ germline_resource = "${params.igenomes_base}/Homo_sapiens/GATK/GRCh38/Annotation/GATKBundle/af-only-gnomad.hg38.vcf.gz"
+ germline_resource_tbi = "${params.igenomes_base}/Homo_sapiens/GATK/GRCh38/Annotation/GATKBundle/af-only-gnomad.hg38.vcf.gz.tbi"
+ intervals = "${params.igenomes_base}/Homo_sapiens/GATK/GRCh38/Annotation/intervals/wgs_calling_regions_noseconds.hg38.bed"
+ known_indels = "${params.igenomes_base}/Homo_sapiens/GATK/GRCh38/Annotation/GATKBundle/{Mills_and_1000G_gold_standard.indels.hg38,beta/Homo_sapiens_assembly38.known_indels}.vcf.gz"
+ known_indels_tbi = "${params.igenomes_base}/Homo_sapiens/GATK/GRCh38/Annotation/GATKBundle/{Mills_and_1000G_gold_standard.indels.hg38,beta/Homo_sapiens_assembly38.known_indels}.vcf.gz.tbi"
+ known_indels_vqsr = '--resource:gatk,known=false,training=true,truth=true,prior=10.0 Homo_sapiens_assembly38.known_indels.vcf.gz --resource:mills,known=false,training=true,truth=true,prior=10.0 Mills_and_1000G_gold_standard.indels.hg38.vcf.gz'
+ known_snps = "${params.igenomes_base}/Homo_sapiens/GATK/GRCh38/Annotation/GATKBundle/1000G_omni2.5.hg38.vcf.gz"
+ known_snps_tbi = "${params.igenomes_base}/Homo_sapiens/GATK/GRCh38/Annotation/GATKBundle/1000G_omni2.5.hg38.vcf.gz.tbi"
+ known_snps_vqsr = '--resource:1000G,known=false,training=true,truth=true,prior=10.0 1000G_omni2.5.hg38.vcf.gz'
+ mappability = "${params.igenomes_base}/Homo_sapiens/GATK/GRCh38/Annotation/Control-FREEC/out100m2_hg38.gem"
+ ngscheckmate_bed = "${params.igenomes_base}/Homo_sapiens/GATK/GRCh38/Annotation/NGSCheckMate/SNP_GRCh38_hg38_wChr.bed"
+ pon = "${params.igenomes_base}/Homo_sapiens/GATK/GRCh38/Annotation/GATKBundle/1000g_pon.hg38.vcf.gz"
+ pon_tbi = "${params.igenomes_base}/Homo_sapiens/GATK/GRCh38/Annotation/GATKBundle/1000g_pon.hg38.vcf.gz.tbi"
+ sentieon_dnascope_model = "${params.igenomes_base}/Homo_sapiens/GATK/GRCh38/Annotation/Sentieon/SentieonDNAscopeModel1.1.model"
+ snpeff_db = 105
+ snpeff_genome = 'GRCh38'
+ vep_cache_version = 110
+ vep_genome = 'GRCh38'
+ vep_species = 'homo_sapiens'
}
'Ensembl.GRCh37' {
bwa = "${params.igenomes_base}/Homo_sapiens/Ensembl/GRCh37/Sequence/BWAIndex/version0.6.0/"
fasta = "${params.igenomes_base}/Homo_sapiens/Ensembl/GRCh37/Sequence/WholeGenomeFasta/genome.fa"
+ ngscheckmate_bed = "${params.igenomes_base}/Homo_sapiens/Ensembl/GRCh37/Annotation/NGSCheckMate/SNP_GRCh37_hg19_woChr.bed"
readme = "${params.igenomes_base}/Homo_sapiens/Ensembl/GRCh37/Annotation/README.txt"
snpeff_db = 87
snpeff_genome = 'GRCh37'
@@ -89,6 +93,7 @@ params {
'NCBI.GRCh38' {
bwa = "${params.igenomes_base}/Homo_sapiens/NCBI/GRCh38/Sequence/BWAIndex/version0.6.0/"
fasta = "${params.igenomes_base}/Homo_sapiens/NCBI/GRCh38/Sequence/WholeGenomeFasta/genome.fa"
+ ngscheckmate_bed = "${params.igenomes_base}/Homo_sapiens/NCBI/GRCh38/Annotation/NGSCheckMate/SNP_GRCh38_hg38_wChr.bed"
snpeff_db = 105
snpeff_genome = 'GRCh38'
vep_cache_version = 110
diff --git a/conf/modules/aligner.config b/conf/modules/aligner.config
index 931f18d6af..5f44e199b0 100644
--- a/conf/modules/aligner.config
+++ b/conf/modules/aligner.config
@@ -16,26 +16,26 @@
process {
if (params.step == 'mapping') {
- withName: "BWAMEM1_MEM" {
- ext.when = { params.aligner == "bwa-mem" }
+ withName: 'BWAMEM1_MEM' {
+ ext.when = { params.aligner == 'bwa-mem' }
}
- withName: "BWAMEM2_MEM" {
- ext.when = { params.aligner == "bwa-mem2" }
+ withName: 'BWAMEM2_MEM' {
+ ext.when = { params.aligner == 'bwa-mem2' }
}
- withName: "DRAGMAP_ALIGN" {
- ext.when = { params.aligner == "dragmap" }
- ext.args = { "--RGSM ${meta.patient}_${meta.sample} --RGID ${meta.read_group}" }
+ withName: 'DRAGMAP_ALIGN' {
+ ext.args = { "--RGSM ${meta.patient}_${meta.sample} --RGID ${meta.read_group}" }
+ ext.when = { params.aligner == 'dragmap' }
}
- withName: "SENTIEON_BWAMEM" {
- ext.when = { params.aligner == "sentieon-bwamem" }
+ withName: 'SENTIEON_BWAMEM' {
+ ext.when = { params.aligner == 'sentieon-bwamem' }
}
- withName: "(BWAMEM.*_MEM|DRAGMAP_ALIGN|SENTIEON_BWAMEM)" {
- ext.prefix = { params.split_fastq > 1 ? "${meta.id}".concat('.').concat(reads.get(0).name.tokenize('.')[0]) : "${meta.id}.sorted" }
- publishDir = [
+ withName: 'BWAMEM.*_MEM|DRAGMAP_ALIGN|SENTIEON_BWAMEM' {
+ ext.prefix = { params.split_fastq > 1 ? "${meta.id}".concat('.').concat(reads.get(0).name.tokenize('.')[0]) : "${meta.id}.sorted" }
+ publishDir = [
mode: params.publish_dir_mode,
path: { "${params.outdir}/preprocessing/" },
pattern: "*bam",
@@ -55,22 +55,22 @@ process {
]
}
- withName: "(BWAMEM.*_MEM|DRAGMAP_ALIGN)" {
+ withName: 'BWAMEM.*_MEM|DRAGMAP_ALIGN' {
// Markduplicates Spark NEEDS name-sorted reads or runtime goes through the roof
// However if it's skipped, reads need to be coordinate-sorted
// Only name sort if Spark for Markduplicates + duplicate marking is not skipped
// Currently SENTIEON_BWAMEM only supports coordinate sorting the reads.
- ext.args2 = { params.use_gatk_spark && params.use_gatk_spark.contains('markduplicates') && (!params.skip_tools || (params.skip_tools && !params.skip_tools.split(',').contains('markduplicates'))) ? '-n' : '' }
+ ext.args2 = { params.use_gatk_spark && params.use_gatk_spark.contains('markduplicates') && (!params.skip_tools || (params.skip_tools && !params.skip_tools.split(',').contains('markduplicates'))) ? '-n' : '' }
}
- withName: "BWAMEM.*_MEM|SENTIEON_BWAMEM" {
+ withName: 'BWAMEM.*_MEM|SENTIEON_BWAMEM' {
// Using -B 3 for tumor samples
- ext.args = { meta.status == 1 ? "-K 100000000 -Y -B 3 -R ${meta.read_group}" : "-K 100000000 -Y -R ${meta.read_group}" }
+ ext.args = { meta.status == 1 ? "-K 100000000 -Y -B 3 -R ${meta.read_group}" : "-K 100000000 -Y -R ${meta.read_group}" }
}
}
withName: 'MERGE_BAM|INDEX_MERGE_BAM' {
- publishDir = [
+ publishDir = [
mode: params.publish_dir_mode,
path: { "${params.outdir}/preprocessing/" },
pattern: "*{bam,bai}",
diff --git a/conf/modules/alignment_to_fastq.config b/conf/modules/alignment_to_fastq.config
index b207e57740..32878e5342 100644
--- a/conf/modules/alignment_to_fastq.config
+++ b/conf/modules/alignment_to_fastq.config
@@ -16,68 +16,68 @@
process {
withName: 'COLLATE_FASTQ_MAP' {
- ext.args2 = '-N'
- ext.prefix = {"${meta.id}.mapped"}
- publishDir = [
+ ext.args2 = { '-N' }
+ ext.prefix = { "${meta.id}.mapped" }
+ publishDir = [
//specify to avoid publishing, overwritten otherwise
enabled: false
]
}
withName: 'COLLATE_FASTQ_UNMAP' {
- ext.args2 = '-N'
- ext.prefix = {"${meta.id}.unmapped"}
- publishDir = [
+ ext.args2 = { '-N' }
+ ext.prefix = { "${meta.id}.unmapped" }
+ publishDir = [
//specify to avoid publishing, overwritten otherwise
enabled: false
]
}
withName: 'SAMTOOLS_VIEW_MAP_MAP' {
- ext.args = '-b -f1 -F12'
- ext.prefix = {"${meta.id}.map_map"}
- publishDir = [
+ ext.args = { '-b -f1 -F12' }
+ ext.prefix = { "${meta.id}.map_map" }
+ publishDir = [
//specify to avoid publishing, overwritten otherwise
enabled: false
]
}
withName: 'SAMTOOLS_VIEW_MAP_UNMAP' {
- ext.args = '-b -f8 -F260'
- ext.prefix = {"${meta.id}.map_unmap"}
- publishDir = [
+ ext.args = { '-b -f8 -F260' }
+ ext.prefix = { "${meta.id}.map_unmap" }
+ publishDir = [
//specify to avoid publishing, overwritten otherwise
enabled: false
]
}
withName: 'SAMTOOLS_VIEW_UNMAP_MAP' {
- ext.args = '-b -f4 -F264'
- ext.prefix = {"${meta.id}.unmap_map"}
- publishDir = [
+ ext.args = { '-b -f4 -F264' }
+ ext.prefix = { "${meta.id}.unmap_map" }
+ publishDir = [
//specify to avoid publishing, overwritten otherwise
enabled: false
]
}
withName: 'SAMTOOLS_VIEW_UNMAP_UNMAP' {
- ext.args = '-b -f12 -F256'
- ext.prefix = {"${meta.id}.unmap_unmap"}
- publishDir = [
+ ext.args = { '-b -f12 -F256' }
+ ext.prefix = { "${meta.id}.unmap_unmap" }
+ publishDir = [
//specify to avoid publishing, overwritten otherwise
enabled: false
]
}
withName: 'SAMTOOLS_MERGE_UNMAP' {
- ext.prefix = {"${meta.id}.merged_unmap"}
- publishDir = [
+ ext.prefix = { "${meta.id}.merged_unmap" }
+ publishDir = [
//specify to avoid publishing, overwritten otherwise
enabled: false
]
}
withName: 'CAT_FASTQ' {
- publishDir = [
+ publishDir = [
//specify to avoid publishing, overwritten otherwise
enabled: false
]
diff --git a/conf/modules/annotate.config b/conf/modules/annotate.config
index bf92ecf7b8..ff046ca843 100644
--- a/conf/modules/annotate.config
+++ b/conf/modules/annotate.config
@@ -18,9 +18,9 @@ process {
// SNPEFF
if (params.tools && (params.tools.split(',').contains('snpeff') || params.tools.split(',').contains('merge'))) {
withName: 'SNPEFF_SNPEFF' {
- ext.prefix = { vcf.baseName - ".vcf" + "_snpEff" }
- ext.args = '-nodownload -canon -v'
- publishDir = [
+ ext.args = { '-nodownload -canon -v' }
+ ext.prefix = { vcf.baseName - '.vcf' + '_snpEff' }
+ publishDir = [
[
mode: params.publish_dir_mode,
path: { "${params.outdir}/reports/snpeff/${meta.variantcaller}/${meta.id}/" },
@@ -34,18 +34,18 @@ process {
// VEP
if (params.tools && (params.tools.split(',').contains('vep') || params.tools.split(',').contains('merge'))) {
withName: 'ENSEMBLVEP_VEP' {
- ext.args = { [
+ ext.args = { [
(params.vep_dbnsfp && params.dbnsfp && !params.dbnsfp_consequence) ? "--plugin dbNSFP,${params.dbnsfp.split("/")[-1]},${params.dbnsfp_fields}" : '',
(params.vep_dbnsfp && params.dbnsfp && params.dbnsfp_consequence) ? "--plugin dbNSFP,'consequence=${params.dbnsfp_consequence}',${params.dbnsfp.split("/")[-1]},${params.dbnsfp_fields}" : '',
- (params.vep_loftee) ? "--plugin LoF,loftee_path:/opt/conda/envs/nf-core-vep-${params.vep_version}/share/ensembl-vep-${params.vep_version}-0" : '',
+ (params.vep_loftee) ? "--plugin LoF,loftee_path:/usr/local/share/ensembl-vep-${params.vep_version}" : '',
(params.vep_spliceai && params.spliceai_snv && params.spliceai_indel) ? "--plugin SpliceAI,snv=${params.spliceai_snv.split("/")[-1]},indel=${params.spliceai_indel.split("/")[-1]}" : '',
(params.vep_spliceregion) ? '--plugin SpliceRegion' : '',
(params.vep_out_format) ? "--${params.vep_out_format}" : '--vcf',
(params.vep_custom_args) ?: ''
].join(' ').trim() }
// If just VEP: Output files for tumor/normal samples
@@ -745,6 +748,8 @@ The file `Output files for all samples
+
+- `{sample,tumorsample_vs_normalsample}.Output files for all samples
+
+**Output directory: `{outdir}/reports/ngscheckmate/`**
+
+- `ngscheckmate_all.txt`
+ - Tab delimited text file listing all the comparisons made, whether they were considered as a match, with the correlation and a normalised depth.
+- `ngscheckmate_matched.txt`
+ - Tab delimited text file listing only the comparisons that were considered to match, with the correlation and a normalised depth.
+- `ngscheckmate_output_corr_matrix.txt`
+ - Tab delimited text file containing a matrix of all correlations for all comparisons made.
+- `vcfs/12.922000 K (92.984097%)",
+ "single end (151 cycles)" ]
+ def log_text = [ "Q20 bases: 12922(92.9841%)",
+ "reads passed filter: 99" ]
+ def read_lines = ["@ERR5069949.2151832 NS500628:121:HK3MMAFX2:2:21208:10793:15304/1",
+ "TCATAAACCAAAGCACTCACAGTGTCAACAATTTCAGCAGGACAACGCCGACAAGTTCCGAGGAACATGTCTGGACCTATAGTTTTCATAAGTCTACACACTGAATTGAAATATTCTGGTTCTAGTGTGCCCTTAGTTAGCAATGTGCGT",
+ "AAAAAAEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEAAEEEEAEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEAAEEEEE 25.719000 K (93.033098%)",
+ "The input has little adapter percentage (~0.000000%), probably it's trimmed before."]
+ def log_text = [ "No adapter detected for read1",
+ "Q30 bases: 12281(88.3716%)"]
+ def json_text = ['"passed_filter_reads": 198']
+ def read1_lines = ["@ERR5069949.2151832 NS500628:121:HK3MMAFX2:2:21208:10793:15304/1",
+ "TCATAAACCAAAGCACTCACAGTGTCAACAATTTCAGCAGGACAACGCCGACAAGTTCCGAGGAACATGTCTGGACCTATAGTTTTCATAAGTCTACACACTGAATTGAAATATTCTGGTTCTAGTGTGCCCTTAGTTAGCAATGTGCGT",
+ "AAAAAAEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEAAEEEEAEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEAAEEEEE 25.719000 K (93.033098%)",
+ "paired end (151 cycles + 151 cycles)"]
+ def log_text = [ "Q20 bases: 12922(92.9841%)",
+ "reads passed filter: 198"]
+ def read_lines = [ "@ERR5069949.2151832 NS500628:121:HK3MMAFX2:2:21208:10793:15304/1",
+ "TCATAAACCAAAGCACTCACAGTGTCAACAATTTCAGCAGGACAACGCCGACAAGTTCCGAGGAACATGTCTGGACCTATAGTTTTCATAAGTCTACACACTGAATTGAAATATTCTGGTTCTAGTGTGCCCTTAGTTAGCAATGTGCGT",
+ "AAAAAAEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEAAEEEEAEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEAAEEEEE 12.922000 K (92.984097%)",
+ "single end (151 cycles)"]
+ def log_text = [ "Q20 bases: 12922(92.9841%)",
+ "reads passed filter: 99" ]
+ def read_lines = [ "@ERR5069949.2151832 NS500628:121:HK3MMAFX2:2:21208:10793:15304/1",
+ "TCATAAACCAAAGCACTCACAGTGTCAACAATTTCAGCAGGACAACGCCGACAAGTTCCGAGGAACATGTCTGGACCTATAGTTTTCATAAGTCTACACACTGAATTGAAATATTCTGGTTCTAGTGTGCCCTTAGTTAGCAATGTGCGT",
+ "AAAAAAEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEAAEEEEAEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEAAEEEEE 25.719000 K (93.033098%)",
+ "The input has little adapter percentage (~0.000000%), probably it's trimmed before."]
+ def log_text = [ "No adapter detected for read1",
+ "Q30 bases: 12281(88.3716%)"]
+ def json_text = ['"passed_filter_reads": 198']
+ def read1_lines = ["@ERR5069949.2151832 NS500628:121:HK3MMAFX2:2:21208:10793:15304/1",
+ "TCATAAACCAAAGCACTCACAGTGTCAACAATTTCAGCAGGACAACGCCGACAAGTTCCGAGGAACATGTCTGGACCTATAGTTTTCATAAGTCTACACACTGAATTGAAATATTCTGGTTCTAGTGTGCCCTTAGTTAGCAATGTGCGT",
+ "AAAAAAEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEAAEEEEAEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEAAEEEEE
test.gz ") },
+ { assert snapshot(process.out.versions).match("versions") },
+ { assert process.out.zip.get(0).get(1) ==~ ".*/test_fastqc.zip" }
+ )
+ }
+ }
+}
diff --git a/modules/nf-core/fastqc/tests/main.nf.test.snap b/modules/nf-core/fastqc/tests/main.nf.test.snap
new file mode 100644
index 0000000000..636a32cead
--- /dev/null
+++ b/modules/nf-core/fastqc/tests/main.nf.test.snap
@@ -0,0 +1,10 @@
+{
+ "versions": {
+ "content": [
+ [
+ "versions.yml:md5,e1cc25ca8af856014824abd842e93978"
+ ]
+ ],
+ "timestamp": "2023-10-09T23:40:54+0000"
+ }
+}
\ No newline at end of file
diff --git a/modules/nf-core/fastqc/tests/tags.yml b/modules/nf-core/fastqc/tests/tags.yml
new file mode 100644
index 0000000000..7834294ba0
--- /dev/null
+++ b/modules/nf-core/fastqc/tests/tags.yml
@@ -0,0 +1,2 @@
+fastqc:
+ - modules/nf-core/fastqc/**
diff --git a/modules/nf-core/fgbio/callmolecularconsensusreads/environment.yml b/modules/nf-core/fgbio/callmolecularconsensusreads/environment.yml
new file mode 100644
index 0000000000..1429e478ec
--- /dev/null
+++ b/modules/nf-core/fgbio/callmolecularconsensusreads/environment.yml
@@ -0,0 +1,7 @@
+name: fgbio_callmolecularconsensusreads
+channels:
+ - conda-forge
+ - bioconda
+ - defaults
+dependencies:
+ - bioconda::fgbio=2.0.2
diff --git a/modules/nf-core/fgbio/callmolecularconsensusreads/main.nf b/modules/nf-core/fgbio/callmolecularconsensusreads/main.nf
index 4515040cc9..e9f209ef16 100644
--- a/modules/nf-core/fgbio/callmolecularconsensusreads/main.nf
+++ b/modules/nf-core/fgbio/callmolecularconsensusreads/main.nf
@@ -2,7 +2,7 @@ process FGBIO_CALLMOLECULARCONSENSUSREADS {
tag "$meta.id"
label 'process_medium'
- conda "bioconda::fgbio=2.0.2"
+ conda "${moduleDir}/environment.yml"
container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
'https://depot.galaxyproject.org/singularity/fgbio:2.0.2--hdfd78af_0' :
'biocontainers/fgbio:2.0.2--hdfd78af_0' }"
diff --git a/modules/nf-core/fgbio/callmolecularconsensusreads/meta.yml b/modules/nf-core/fgbio/callmolecularconsensusreads/meta.yml
index 371825123a..f4a6ab1bb8 100644
--- a/modules/nf-core/fgbio/callmolecularconsensusreads/meta.yml
+++ b/modules/nf-core/fgbio/callmolecularconsensusreads/meta.yml
@@ -1,6 +1,5 @@
name: fgbio_callmolecularconsensusreads
description: Calls consensus sequences from reads with the same unique molecular tag.
-
keywords:
- UMIs
- consensus sequence
@@ -12,7 +11,6 @@ tools:
homepage: https://github.com/fulcrumgenomics/fgbio
documentation: http://fulcrumgenomics.github.io/fgbio/
licence: ["MIT"]
-
input:
- meta:
type: map
@@ -24,7 +22,6 @@ input:
description: |
The input SAM or BAM file.
pattern: "*.{bam,sam}"
-
output:
- meta:
type: map
@@ -40,6 +37,7 @@ output:
type: file
description: File containing software versions
pattern: "versions.yml"
-
authors:
- "@sruthipsuresh"
+maintainers:
+ - "@sruthipsuresh"
diff --git a/modules/nf-core/fgbio/fastqtobam/environment.yml b/modules/nf-core/fgbio/fastqtobam/environment.yml
new file mode 100644
index 0000000000..f5f1992581
--- /dev/null
+++ b/modules/nf-core/fgbio/fastqtobam/environment.yml
@@ -0,0 +1,7 @@
+name: fgbio_fastqtobam
+channels:
+ - conda-forge
+ - bioconda
+ - defaults
+dependencies:
+ - bioconda::fgbio=2.0.2
diff --git a/modules/nf-core/fgbio/fastqtobam/main.nf b/modules/nf-core/fgbio/fastqtobam/main.nf
index 74016d3c45..f7302171b8 100644
--- a/modules/nf-core/fgbio/fastqtobam/main.nf
+++ b/modules/nf-core/fgbio/fastqtobam/main.nf
@@ -2,7 +2,7 @@ process FGBIO_FASTQTOBAM {
tag "$meta.id"
label 'process_low'
- conda "bioconda::fgbio=2.0.2"
+ conda "${moduleDir}/environment.yml"
container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
'https://depot.galaxyproject.org/singularity/fgbio:2.0.2--hdfd78af_0' :
'biocontainers/fgbio:2.0.2--hdfd78af_0' }"
diff --git a/modules/nf-core/fgbio/fastqtobam/meta.yml b/modules/nf-core/fgbio/fastqtobam/meta.yml
index f85485cab8..4b37cd530f 100644
--- a/modules/nf-core/fgbio/fastqtobam/meta.yml
+++ b/modules/nf-core/fgbio/fastqtobam/meta.yml
@@ -10,15 +10,12 @@ tools:
homepage: http://fulcrumgenomics.github.io/fgbio/
documentation: http://fulcrumgenomics.github.io/fgbio/tools/latest/
tool_dev_url: https://github.com/fulcrumgenomics/fgbio
-
licence: ["MIT"]
-
input:
- reads:
type: file
description: pair of reads to be converted into BAM file
pattern: "*.{fastq.gz}"
-
output:
- meta:
type: map
@@ -37,7 +34,9 @@ output:
type: file
description: Unaligned, unsorted CRAM file
pattern: "*.{cram}"
-
authors:
- "@lescai"
- "@matthdsm"
+maintainers:
+ - "@lescai"
+ - "@matthdsm"
diff --git a/modules/nf-core/fgbio/groupreadsbyumi/environment.yml b/modules/nf-core/fgbio/groupreadsbyumi/environment.yml
new file mode 100644
index 0000000000..58e37bf6bd
--- /dev/null
+++ b/modules/nf-core/fgbio/groupreadsbyumi/environment.yml
@@ -0,0 +1,7 @@
+name: fgbio_groupreadsbyumi
+channels:
+ - conda-forge
+ - bioconda
+ - defaults
+dependencies:
+ - bioconda::fgbio=2.0.2
diff --git a/modules/nf-core/fgbio/groupreadsbyumi/main.nf b/modules/nf-core/fgbio/groupreadsbyumi/main.nf
index 879e453452..7179290c91 100644
--- a/modules/nf-core/fgbio/groupreadsbyumi/main.nf
+++ b/modules/nf-core/fgbio/groupreadsbyumi/main.nf
@@ -2,7 +2,7 @@ process FGBIO_GROUPREADSBYUMI {
tag "$meta.id"
label 'process_low'
- conda "bioconda::fgbio=2.0.2"
+ conda "${moduleDir}/environment.yml"
container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
'https://depot.galaxyproject.org/singularity/fgbio:2.0.2--hdfd78af_0' :
'biocontainers/fgbio:2.0.2--hdfd78af_0' }"
diff --git a/modules/nf-core/fgbio/groupreadsbyumi/meta.yml b/modules/nf-core/fgbio/groupreadsbyumi/meta.yml
index f40a5ac7fc..02ca91f19f 100644
--- a/modules/nf-core/fgbio/groupreadsbyumi/meta.yml
+++ b/modules/nf-core/fgbio/groupreadsbyumi/meta.yml
@@ -16,9 +16,7 @@ tools:
homepage: http://fulcrumgenomics.github.io/fgbio/
documentation: http://fulcrumgenomics.github.io/fgbio/tools/latest/
tool_dev_url: https://github.com/fulcrumgenomics/fgbio
-
licence: ["MIT"]
-
input:
- meta:
type: map
@@ -35,7 +33,6 @@ input:
description: |
Required argument: defines the UMI assignment strategy.
Must be chosen among: Identity, Edit, Adjacency, Paired.
-
output:
- meta:
type: map
@@ -54,6 +51,7 @@ output:
type: file
description: A text file containing the tag family size counts
pattern: "*.txt"
-
authors:
- "@lescai"
+maintainers:
+ - "@lescai"
diff --git a/modules/nf-core/freebayes/environment.yml b/modules/nf-core/freebayes/environment.yml
new file mode 100644
index 0000000000..6846080a2f
--- /dev/null
+++ b/modules/nf-core/freebayes/environment.yml
@@ -0,0 +1,7 @@
+name: freebayes
+channels:
+ - conda-forge
+ - bioconda
+ - defaults
+dependencies:
+ - bioconda::freebayes=1.3.6
diff --git a/modules/nf-core/freebayes/main.nf b/modules/nf-core/freebayes/main.nf
index 1466f085e8..8a1c641ded 100644
--- a/modules/nf-core/freebayes/main.nf
+++ b/modules/nf-core/freebayes/main.nf
@@ -2,7 +2,7 @@ process FREEBAYES {
tag "$meta.id"
label 'process_single'
- conda "bioconda::freebayes=1.3.6"
+ conda "${moduleDir}/environment.yml"
container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
'https://depot.galaxyproject.org/singularity/freebayes:1.3.6--hbfe0e7f_2' :
'biocontainers/freebayes:1.3.6--hbfe0e7f_2' }"
diff --git a/modules/nf-core/freebayes/meta.yml b/modules/nf-core/freebayes/meta.yml
index 17d83cba2b..e2cf1a175c 100644
--- a/modules/nf-core/freebayes/meta.yml
+++ b/modules/nf-core/freebayes/meta.yml
@@ -8,7 +8,6 @@ keywords:
- germline variant calling
- bacterial variant calling
- bayesian
-
tools:
- freebayes:
description: Bayesian haplotype-based polymorphism discovery and genotyping
@@ -17,7 +16,6 @@ tools:
tool_dev_url: https://github.com/freebayes/freebayes
doi: "10.48550/arXiv.1207.3907"
licence: ["MIT"]
-
input:
- meta:
type: map
@@ -60,7 +58,6 @@ input:
or a region-specific format:
seq_name start end sample_name copy_number
pattern: "*.bed"
-
output:
- meta:
type: map
@@ -75,8 +72,11 @@ output:
type: file
description: Compressed VCF file
pattern: "*.vcf.gz"
-
authors:
- "@maxibor"
- "@FriederikeHanssen"
- "@maxulysse"
+maintainers:
+ - "@maxibor"
+ - "@FriederikeHanssen"
+ - "@maxulysse"
diff --git a/modules/nf-core/gatk4/applybqsr/environment.yml b/modules/nf-core/gatk4/applybqsr/environment.yml
new file mode 100644
index 0000000000..a690099123
--- /dev/null
+++ b/modules/nf-core/gatk4/applybqsr/environment.yml
@@ -0,0 +1,7 @@
+name: gatk4_applybqsr
+channels:
+ - conda-forge
+ - bioconda
+ - defaults
+dependencies:
+ - bioconda::gatk4=4.4.0.0
diff --git a/modules/nf-core/gatk4/applybqsr/main.nf b/modules/nf-core/gatk4/applybqsr/main.nf
index e5e6bf99c7..7e49563739 100644
--- a/modules/nf-core/gatk4/applybqsr/main.nf
+++ b/modules/nf-core/gatk4/applybqsr/main.nf
@@ -2,7 +2,7 @@ process GATK4_APPLYBQSR {
tag "$meta.id"
label 'process_low'
- conda "bioconda::gatk4=4.4.0.0"
+ conda "${moduleDir}/environment.yml"
container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
'https://depot.galaxyproject.org/singularity/gatk4:4.4.0.0--py36hdfd78af_0':
'biocontainers/gatk4:4.4.0.0--py36hdfd78af_0' }"
diff --git a/modules/nf-core/gatk4/applybqsr/meta.yml b/modules/nf-core/gatk4/applybqsr/meta.yml
index 2085fa97b2..ab9efea3f4 100644
--- a/modules/nf-core/gatk4/applybqsr/meta.yml
+++ b/modules/nf-core/gatk4/applybqsr/meta.yml
@@ -16,7 +16,6 @@ tools:
documentation: https://gatk.broadinstitute.org/hc/en-us/categories/360002369672s
doi: 10.1158/1538-7445.AM2017-3590
licence: ["Apache-2.0"]
-
input:
- meta:
type: map
@@ -49,7 +48,6 @@ input:
type: file
description: GATK sequence dictionary
pattern: "*.dict"
-
output:
- meta:
type: map
@@ -68,7 +66,9 @@ output:
type: file
description: Recalibrated CRAM file
pattern: "*.{cram}"
-
authors:
- "@yocra3"
- "@FriederikeHanssen"
+maintainers:
+ - "@yocra3"
+ - "@FriederikeHanssen"
diff --git a/modules/nf-core/gatk4/applyvqsr/environment.yml b/modules/nf-core/gatk4/applyvqsr/environment.yml
new file mode 100644
index 0000000000..e640768957
--- /dev/null
+++ b/modules/nf-core/gatk4/applyvqsr/environment.yml
@@ -0,0 +1,7 @@
+name: gatk4_applyvqsr
+channels:
+ - conda-forge
+ - bioconda
+ - defaults
+dependencies:
+ - bioconda::gatk4=4.4.0.0
diff --git a/modules/nf-core/gatk4/applyvqsr/main.nf b/modules/nf-core/gatk4/applyvqsr/main.nf
index 8413f2bb3b..21afe9a528 100644
--- a/modules/nf-core/gatk4/applyvqsr/main.nf
+++ b/modules/nf-core/gatk4/applyvqsr/main.nf
@@ -2,7 +2,7 @@ process GATK4_APPLYVQSR {
tag "$meta.id"
label 'process_low'
- conda "bioconda::gatk4=4.4.0.0"
+ conda "${moduleDir}/environment.yml"
container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
'https://depot.galaxyproject.org/singularity/gatk4:4.4.0.0--py36hdfd78af_0':
'biocontainers/gatk4:4.4.0.0--py36hdfd78af_0' }"
diff --git a/modules/nf-core/gatk4/applyvqsr/meta.yml b/modules/nf-core/gatk4/applyvqsr/meta.yml
index e1e121a661..de5d6d067a 100644
--- a/modules/nf-core/gatk4/applyvqsr/meta.yml
+++ b/modules/nf-core/gatk4/applyvqsr/meta.yml
@@ -19,7 +19,6 @@ tools:
documentation: https://gatk.broadinstitute.org/hc/en-us/categories/360002369672s
doi: 10.1158/1538-7445.AM2017-3590
licence: ["Apache-2.0"]
-
input:
- meta:
type: map
@@ -58,7 +57,6 @@ input:
type: file
description: GATK sequence dictionary
pattern: "*.dict"
-
output:
- vcf:
type: file
@@ -72,6 +70,7 @@ output:
type: file
description: File containing software versions.
pattern: "versions.yml"
-
authors:
- "@GCJMackenzie"
+maintainers:
+ - "@GCJMackenzie"
diff --git a/modules/nf-core/gatk4/baserecalibrator/environment.yml b/modules/nf-core/gatk4/baserecalibrator/environment.yml
new file mode 100644
index 0000000000..6863fb1712
--- /dev/null
+++ b/modules/nf-core/gatk4/baserecalibrator/environment.yml
@@ -0,0 +1,7 @@
+name: gatk4_baserecalibrator
+channels:
+ - conda-forge
+ - bioconda
+ - defaults
+dependencies:
+ - bioconda::gatk4=4.4.0.0
diff --git a/modules/nf-core/gatk4/baserecalibrator/main.nf b/modules/nf-core/gatk4/baserecalibrator/main.nf
index 5375289a16..e893b65036 100644
--- a/modules/nf-core/gatk4/baserecalibrator/main.nf
+++ b/modules/nf-core/gatk4/baserecalibrator/main.nf
@@ -2,7 +2,7 @@ process GATK4_BASERECALIBRATOR {
tag "$meta.id"
label 'process_low'
- conda "bioconda::gatk4=4.4.0.0"
+ conda "${moduleDir}/environment.yml"
container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
'https://depot.galaxyproject.org/singularity/gatk4:4.4.0.0--py36hdfd78af_0':
'biocontainers/gatk4:4.4.0.0--py36hdfd78af_0' }"
diff --git a/modules/nf-core/gatk4/baserecalibrator/meta.yml b/modules/nf-core/gatk4/baserecalibrator/meta.yml
index db4fecfc92..8252b8c290 100644
--- a/modules/nf-core/gatk4/baserecalibrator/meta.yml
+++ b/modules/nf-core/gatk4/baserecalibrator/meta.yml
@@ -16,7 +16,6 @@ tools:
documentation: https://gatk.broadinstitute.org/hc/en-us/categories/360002369672s
doi: 10.1158/1538-7445.AM2017-3590
licence: ["Apache-2.0"]
-
input:
- meta:
type: map
@@ -54,7 +53,6 @@ input:
type: file
description: Tabix index of the known_sites (optional)
pattern: "*.vcf.gz.tbi"
-
output:
- meta:
type: map
@@ -69,8 +67,11 @@ output:
type: file
description: Recalibration table from BaseRecalibrator
pattern: "*.{table}"
-
authors:
- "@yocra3"
- "@FriederikeHanssen"
- "@maxulysse"
+maintainers:
+ - "@yocra3"
+ - "@FriederikeHanssen"
+ - "@maxulysse"
diff --git a/modules/nf-core/gatk4/calculatecontamination/environment.yml b/modules/nf-core/gatk4/calculatecontamination/environment.yml
new file mode 100644
index 0000000000..d5e45ebe42
--- /dev/null
+++ b/modules/nf-core/gatk4/calculatecontamination/environment.yml
@@ -0,0 +1,7 @@
+name: gatk4_calculatecontamination
+channels:
+ - conda-forge
+ - bioconda
+ - defaults
+dependencies:
+ - bioconda::gatk4=4.4.0.0
diff --git a/modules/nf-core/gatk4/calculatecontamination/main.nf b/modules/nf-core/gatk4/calculatecontamination/main.nf
index 9dd961bec0..8d43c4ee6b 100644
--- a/modules/nf-core/gatk4/calculatecontamination/main.nf
+++ b/modules/nf-core/gatk4/calculatecontamination/main.nf
@@ -2,7 +2,7 @@ process GATK4_CALCULATECONTAMINATION {
tag "$meta.id"
label 'process_low'
- conda "bioconda::gatk4=4.4.0.0"
+ conda "${moduleDir}/environment.yml"
container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
'https://depot.galaxyproject.org/singularity/gatk4:4.4.0.0--py36hdfd78af_0':
'biocontainers/gatk4:4.4.0.0--py36hdfd78af_0' }"
diff --git a/modules/nf-core/gatk4/calculatecontamination/meta.yml b/modules/nf-core/gatk4/calculatecontamination/meta.yml
index 7767bd0807..b0ffe814c5 100644
--- a/modules/nf-core/gatk4/calculatecontamination/meta.yml
+++ b/modules/nf-core/gatk4/calculatecontamination/meta.yml
@@ -17,7 +17,6 @@ tools:
documentation: https://gatk.broadinstitute.org/hc/en-us/categories/360002369672s
doi: 10.1158/1538-7445.AM2017-3590
licence: ["Apache-2.0"]
-
input:
- meta:
type: map
@@ -32,7 +31,6 @@ input:
type: file
description: File containing the pileups summary table of a normal sample that matches with the tumor sample specified in pileup argument. This is an optional input.
pattern: "*.pileups.table"
-
output:
- contamination:
type: file
@@ -46,7 +44,9 @@ output:
type: file
description: File containing software versions
pattern: "versions.yml"
-
authors:
- "@GCJMackenzie"
- "@maxulysse"
+maintainers:
+ - "@GCJMackenzie"
+ - "@maxulysse"
diff --git a/modules/nf-core/gatk4/cnnscorevariants/environment.yml b/modules/nf-core/gatk4/cnnscorevariants/environment.yml
new file mode 100644
index 0000000000..12cc34ba66
--- /dev/null
+++ b/modules/nf-core/gatk4/cnnscorevariants/environment.yml
@@ -0,0 +1,5 @@
+name: gatk4_cnnscorevariants
+channels:
+ - conda-forge
+ - bioconda
+ - defaults
diff --git a/modules/nf-core/gatk4/cnnscorevariants/meta.yml b/modules/nf-core/gatk4/cnnscorevariants/meta.yml
index a2fe3d47c6..8a9d0f51c2 100644
--- a/modules/nf-core/gatk4/cnnscorevariants/meta.yml
+++ b/modules/nf-core/gatk4/cnnscorevariants/meta.yml
@@ -14,7 +14,6 @@ tools:
documentation: https://gatk.broadinstitute.org/hc/en-us/categories/360002369672s
doi: 10.1158/1538-7445.AM2017-3590
licence: ["Apache-2.0"]
-
input:
- meta:
type: map
@@ -56,7 +55,6 @@ input:
type: file
description: Keras model HD5 file with neural net weights. (optional)
pattern: "*.hd5"
-
output:
- meta:
type: map
@@ -75,6 +73,7 @@ output:
type: file
description: VCF index file
pattern: "*.vcf.gz.tbi"
-
authors:
- "@FriederikeHanssen"
+maintainers:
+ - "@FriederikeHanssen"
diff --git a/modules/nf-core/gatk4/createsequencedictionary/environment.yml b/modules/nf-core/gatk4/createsequencedictionary/environment.yml
new file mode 100644
index 0000000000..db663e148f
--- /dev/null
+++ b/modules/nf-core/gatk4/createsequencedictionary/environment.yml
@@ -0,0 +1,7 @@
+name: gatk4_createsequencedictionary
+channels:
+ - conda-forge
+ - bioconda
+ - defaults
+dependencies:
+ - bioconda::gatk4=4.4.0.0
diff --git a/modules/nf-core/gatk4/createsequencedictionary/main.nf b/modules/nf-core/gatk4/createsequencedictionary/main.nf
index 3e4efdd983..b47ad16221 100644
--- a/modules/nf-core/gatk4/createsequencedictionary/main.nf
+++ b/modules/nf-core/gatk4/createsequencedictionary/main.nf
@@ -2,7 +2,7 @@ process GATK4_CREATESEQUENCEDICTIONARY {
tag "$fasta"
label 'process_medium'
- conda "bioconda::gatk4=4.4.0.0"
+ conda "${moduleDir}/environment.yml"
container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
'https://depot.galaxyproject.org/singularity/gatk4:4.4.0.0--py36hdfd78af_0':
'biocontainers/gatk4:4.4.0.0--py36hdfd78af_0' }"
diff --git a/modules/nf-core/gatk4/createsequencedictionary/meta.yml b/modules/nf-core/gatk4/createsequencedictionary/meta.yml
index 9b8b8c8917..f9d70be098 100644
--- a/modules/nf-core/gatk4/createsequencedictionary/meta.yml
+++ b/modules/nf-core/gatk4/createsequencedictionary/meta.yml
@@ -15,7 +15,6 @@ tools:
documentation: https://gatk.broadinstitute.org/hc/en-us/categories/360002369672s
doi: 10.1158/1538-7445.AM2017-3590
licence: ["Apache-2.0"]
-
input:
- meta:
type: map
@@ -38,3 +37,6 @@ output:
authors:
- "@maxulysse"
- "@ramprasadn"
+maintainers:
+ - "@maxulysse"
+ - "@ramprasadn"
diff --git a/modules/nf-core/gatk4/estimatelibrarycomplexity/environment.yml b/modules/nf-core/gatk4/estimatelibrarycomplexity/environment.yml
new file mode 100644
index 0000000000..fabb6f2ba4
--- /dev/null
+++ b/modules/nf-core/gatk4/estimatelibrarycomplexity/environment.yml
@@ -0,0 +1,7 @@
+name: gatk4_estimatelibrarycomplexity
+channels:
+ - conda-forge
+ - bioconda
+ - defaults
+dependencies:
+ - bioconda::gatk4=4.4.0.0
diff --git a/modules/nf-core/gatk4/estimatelibrarycomplexity/main.nf b/modules/nf-core/gatk4/estimatelibrarycomplexity/main.nf
index 81fc83513d..c0eef7b327 100644
--- a/modules/nf-core/gatk4/estimatelibrarycomplexity/main.nf
+++ b/modules/nf-core/gatk4/estimatelibrarycomplexity/main.nf
@@ -2,7 +2,7 @@ process GATK4_ESTIMATELIBRARYCOMPLEXITY {
tag "$meta.id"
label 'process_medium'
- conda "bioconda::gatk4=4.4.0.0"
+ conda "${moduleDir}/environment.yml"
container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
'https://depot.galaxyproject.org/singularity/gatk4:4.4.0.0--py36hdfd78af_0':
'biocontainers/gatk4:4.4.0.0--py36hdfd78af_0' }"
diff --git a/modules/nf-core/gatk4/estimatelibrarycomplexity/meta.yml b/modules/nf-core/gatk4/estimatelibrarycomplexity/meta.yml
index 2783152a59..2d5bddf6c9 100644
--- a/modules/nf-core/gatk4/estimatelibrarycomplexity/meta.yml
+++ b/modules/nf-core/gatk4/estimatelibrarycomplexity/meta.yml
@@ -13,7 +13,6 @@ tools:
tool_dev_url: https://github.com/broadinstitute/gatk
doi: "10.1158/1538-7445.AM2017-3590"
licence: ["Apache-2.0"]
-
input:
- meta:
type: map
@@ -36,7 +35,6 @@ input:
type: file
description: GATK sequence dictionary
pattern: "*.dict"
-
output:
- meta:
type: map
@@ -51,7 +49,9 @@ output:
type: file
description: File containing metrics on the input files
pattern: "*.{metrics}"
-
authors:
- "@FriederikeHanssen"
- "@maxulysse"
+maintainers:
+ - "@FriederikeHanssen"
+ - "@maxulysse"
diff --git a/modules/nf-core/gatk4/filtermutectcalls/environment.yml b/modules/nf-core/gatk4/filtermutectcalls/environment.yml
new file mode 100644
index 0000000000..8057d765d5
--- /dev/null
+++ b/modules/nf-core/gatk4/filtermutectcalls/environment.yml
@@ -0,0 +1,7 @@
+name: gatk4_filtermutectcalls
+channels:
+ - conda-forge
+ - bioconda
+ - defaults
+dependencies:
+ - bioconda::gatk4=4.4.0.0
diff --git a/modules/nf-core/gatk4/filtermutectcalls/main.nf b/modules/nf-core/gatk4/filtermutectcalls/main.nf
index 623b91aece..fa6b46ab3c 100644
--- a/modules/nf-core/gatk4/filtermutectcalls/main.nf
+++ b/modules/nf-core/gatk4/filtermutectcalls/main.nf
@@ -2,7 +2,7 @@ process GATK4_FILTERMUTECTCALLS {
tag "$meta.id"
label 'process_low'
- conda "bioconda::gatk4=4.4.0.0"
+ conda "${moduleDir}/environment.yml"
container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
'https://depot.galaxyproject.org/singularity/gatk4:4.4.0.0--py36hdfd78af_0':
'biocontainers/gatk4:4.4.0.0--py36hdfd78af_0' }"
diff --git a/modules/nf-core/gatk4/filtermutectcalls/meta.yml b/modules/nf-core/gatk4/filtermutectcalls/meta.yml
index 095a28e321..736c838625 100644
--- a/modules/nf-core/gatk4/filtermutectcalls/meta.yml
+++ b/modules/nf-core/gatk4/filtermutectcalls/meta.yml
@@ -16,7 +16,6 @@ tools:
homepage: https://gatk.broadinstitute.org/hc/en-us
documentation: https://gatk.broadinstitute.org/hc/en-us/categories/360002369672s
doi: 10.1158/1538-7445.AM2017-3590
-
input:
- meta:
type: map
@@ -77,7 +76,6 @@ input:
type: file
description: GATK sequence dictionary
pattern: "*.dict"
-
output:
- vcf:
type: file
@@ -95,8 +93,11 @@ output:
type: file
description: File containing software versions
pattern: "versions.yml"
-
authors:
- "@GCJMackenzie"
- "@maxulysse"
- "@ramprasadn"
+maintainers:
+ - "@GCJMackenzie"
+ - "@maxulysse"
+ - "@ramprasadn"
diff --git a/modules/nf-core/gatk4/filtervarianttranches/environment.yml b/modules/nf-core/gatk4/filtervarianttranches/environment.yml
new file mode 100644
index 0000000000..faeea8ddb0
--- /dev/null
+++ b/modules/nf-core/gatk4/filtervarianttranches/environment.yml
@@ -0,0 +1,7 @@
+name: gatk4_filtervarianttranches
+channels:
+ - conda-forge
+ - bioconda
+ - defaults
+dependencies:
+ - bioconda::gatk4=4.4.0.0
diff --git a/modules/nf-core/gatk4/filtervarianttranches/main.nf b/modules/nf-core/gatk4/filtervarianttranches/main.nf
index 90cbf5f0a6..9da47ab739 100644
--- a/modules/nf-core/gatk4/filtervarianttranches/main.nf
+++ b/modules/nf-core/gatk4/filtervarianttranches/main.nf
@@ -2,7 +2,7 @@ process GATK4_FILTERVARIANTTRANCHES {
tag "$meta.id"
label 'process_low'
- conda "bioconda::gatk4=4.4.0.0"
+ conda "${moduleDir}/environment.yml"
container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
'https://depot.galaxyproject.org/singularity/gatk4:4.4.0.0--py36hdfd78af_0':
'biocontainers/gatk4:4.4.0.0--py36hdfd78af_0' }"
diff --git a/modules/nf-core/gatk4/filtervarianttranches/meta.yml b/modules/nf-core/gatk4/filtervarianttranches/meta.yml
index c5325b4891..9346d2b4a4 100644
--- a/modules/nf-core/gatk4/filtervarianttranches/meta.yml
+++ b/modules/nf-core/gatk4/filtervarianttranches/meta.yml
@@ -14,7 +14,6 @@ tools:
documentation: https://gatk.broadinstitute.org/hc/en-us/articles/360051308071-FilterVariantTranches
doi: 10.1158/1538-7445.AM2017-3590
licence: ["Apache-2.0"]
-
input:
- meta:
type: map
@@ -49,7 +48,6 @@ input:
type: file
description: GATK sequence dictionary
pattern: ".dict"
-
output:
- meta:
type: map
@@ -68,6 +66,7 @@ output:
type: file
description: VCF index file
pattern: "*.vcf.gz.tbi"
-
authors:
- "@FriederikeHanssen"
+maintainers:
+ - "@FriederikeHanssen"
diff --git a/modules/nf-core/gatk4/gatherbqsrreports/environment.yml b/modules/nf-core/gatk4/gatherbqsrreports/environment.yml
new file mode 100644
index 0000000000..928ac76e6b
--- /dev/null
+++ b/modules/nf-core/gatk4/gatherbqsrreports/environment.yml
@@ -0,0 +1,7 @@
+name: gatk4_gatherbqsrreports
+channels:
+ - conda-forge
+ - bioconda
+ - defaults
+dependencies:
+ - bioconda::gatk4=4.4.0.0
diff --git a/modules/nf-core/gatk4/gatherbqsrreports/main.nf b/modules/nf-core/gatk4/gatherbqsrreports/main.nf
index 3eeca5ad90..e783701017 100644
--- a/modules/nf-core/gatk4/gatherbqsrreports/main.nf
+++ b/modules/nf-core/gatk4/gatherbqsrreports/main.nf
@@ -2,7 +2,7 @@ process GATK4_GATHERBQSRREPORTS {
tag "$meta.id"
label 'process_medium'
- conda "bioconda::gatk4=4.4.0.0"
+ conda "${moduleDir}/environment.yml"
container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
'https://depot.galaxyproject.org/singularity/gatk4:4.4.0.0--py36hdfd78af_0':
'biocontainers/gatk4:4.4.0.0--py36hdfd78af_0' }"
diff --git a/modules/nf-core/gatk4/gatherbqsrreports/meta.yml b/modules/nf-core/gatk4/gatherbqsrreports/meta.yml
index d9faf09d45..b9f5bf5f8b 100644
--- a/modules/nf-core/gatk4/gatherbqsrreports/meta.yml
+++ b/modules/nf-core/gatk4/gatherbqsrreports/meta.yml
@@ -13,7 +13,6 @@ tools:
tool_dev_url: https://github.com/broadinstitute/gatk
doi: "10.1158/1538-7445.AM2017-3590"
licence: ["BSD-3-clause"]
-
input:
- meta:
type: map
@@ -24,7 +23,6 @@ input:
type: file
description: File(s) containing BQSR table(s)
pattern: "*.table"
-
output:
- meta:
type: map
@@ -39,6 +37,7 @@ output:
type: file
description: File containing software versions
pattern: "versions.yml"
-
authors:
- "@FriederikeHanssen"
+maintainers:
+ - "@FriederikeHanssen"
diff --git a/modules/nf-core/gatk4/gatherpileupsummaries/environment.yml b/modules/nf-core/gatk4/gatherpileupsummaries/environment.yml
new file mode 100644
index 0000000000..1a2ebf5761
--- /dev/null
+++ b/modules/nf-core/gatk4/gatherpileupsummaries/environment.yml
@@ -0,0 +1,7 @@
+name: gatk4_gatherpileupsummaries
+channels:
+ - conda-forge
+ - bioconda
+ - defaults
+dependencies:
+ - bioconda::gatk4=4.4.0.0
diff --git a/modules/nf-core/gatk4/gatherpileupsummaries/main.nf b/modules/nf-core/gatk4/gatherpileupsummaries/main.nf
index f315e1af3b..1863133d2c 100644
--- a/modules/nf-core/gatk4/gatherpileupsummaries/main.nf
+++ b/modules/nf-core/gatk4/gatherpileupsummaries/main.nf
@@ -2,7 +2,7 @@ process GATK4_GATHERPILEUPSUMMARIES {
tag "$meta.id"
label 'process_low'
- conda "bioconda::gatk4=4.4.0.0"
+ conda "${moduleDir}/environment.yml"
container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
'https://depot.galaxyproject.org/singularity/gatk4:4.4.0.0--py36hdfd78af_0':
'biocontainers/gatk4:4.4.0.0--py36hdfd78af_0' }"
diff --git a/modules/nf-core/gatk4/gatherpileupsummaries/meta.yml b/modules/nf-core/gatk4/gatherpileupsummaries/meta.yml
index 6b8569dd5b..35381a3b51 100644
--- a/modules/nf-core/gatk4/gatherpileupsummaries/meta.yml
+++ b/modules/nf-core/gatk4/gatherpileupsummaries/meta.yml
@@ -12,7 +12,6 @@ tools:
tool_dev_url: https://github.com/broadinstitute/gatk
doi: "10.1158/1538-7445.AM2017-3590"
licence: ["BSD-3-clause"]
-
input:
- meta:
type: map
@@ -23,7 +22,6 @@ input:
type: file
description: Pileup files from gatk4/getpileupsummaries
pattern: "*.pileups.table"
-
output:
- meta:
type: map
@@ -38,7 +36,9 @@ output:
type: file
description: File containing software versions
pattern: "versions.yml"
-
authors:
- "@FriederikeHanssen"
- "@maxulysse"
+maintainers:
+ - "@FriederikeHanssen"
+ - "@maxulysse"
diff --git a/modules/nf-core/gatk4/genomicsdbimport/environment.yml b/modules/nf-core/gatk4/genomicsdbimport/environment.yml
new file mode 100644
index 0000000000..ce3f941694
--- /dev/null
+++ b/modules/nf-core/gatk4/genomicsdbimport/environment.yml
@@ -0,0 +1,7 @@
+name: gatk4_genomicsdbimport
+channels:
+ - conda-forge
+ - bioconda
+ - defaults
+dependencies:
+ - bioconda::gatk4=4.4.0.0
diff --git a/modules/nf-core/gatk4/genomicsdbimport/main.nf b/modules/nf-core/gatk4/genomicsdbimport/main.nf
index a8725d3f9a..916037ebef 100644
--- a/modules/nf-core/gatk4/genomicsdbimport/main.nf
+++ b/modules/nf-core/gatk4/genomicsdbimport/main.nf
@@ -2,7 +2,7 @@ process GATK4_GENOMICSDBIMPORT {
tag "$meta.id"
label 'process_medium'
- conda "bioconda::gatk4=4.4.0.0"
+ conda "${moduleDir}/environment.yml"
container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
'https://depot.galaxyproject.org/singularity/gatk4:4.4.0.0--py36hdfd78af_0':
'biocontainers/gatk4:4.4.0.0--py36hdfd78af_0' }"
diff --git a/modules/nf-core/gatk4/genomicsdbimport/meta.yml b/modules/nf-core/gatk4/genomicsdbimport/meta.yml
index ff114d7d20..ca8fe3d076 100644
--- a/modules/nf-core/gatk4/genomicsdbimport/meta.yml
+++ b/modules/nf-core/gatk4/genomicsdbimport/meta.yml
@@ -15,7 +15,6 @@ tools:
homepage: https://gatk.broadinstitute.org/hc/en-us
documentation: https://gatk.broadinstitute.org/hc/en-us/categories/360002369672s
doi: 10.1158/1538-7445.AM2017-3590
-
input:
- meta:
type: map
@@ -26,42 +25,34 @@ input:
type: list
description: either a list of vcf files to be used to create or update a genomicsdb, or a file that contains a map to vcf files to be used.
pattern: "*.vcf.gz"
-
- tbi:
type: list
description: list of tbi files that match with the input vcf files
pattern: "*.vcf.gz_tbi"
-
- wspace:
type: file
description: path to an existing genomicsdb to be used in update db mode or get intervals mode. This WILL NOT specify the name of a new genomicsdb in create db mode.
pattern: "/path/to/existing/gendb"
-
- intervalfile:
type: file
description: file containing the intervals to be used when creating the genomicsdb
pattern: "*.interval_list"
-
- intervalval:
type: string
description: if an intervals file has not been specified, the value entered here will be used as an interval via the "-L" argument
pattern: "example: chr1:1000-10000"
-
- run_intlist:
type: boolean
description: Specify whether to run get interval list mode, this option cannot be specified at the same time as run_updatewspace.
pattern: "true/false"
-
- run_updatewspace:
type: boolean
description: Specify whether to run update genomicsdb mode, this option takes priority over run_intlist.
pattern: "true/false"
-
- input_map:
type: boolean
description: Specify whether the vcf input is providing a list of vcf file(s) or a single file containing a map of paths to vcf files to be used to create or update a genomicsdb.
pattern: "*.sample_map"
-
output:
- genomicsdb:
type: directory
@@ -79,6 +70,7 @@ output:
type: file
description: File containing software versions
pattern: "versions.yml"
-
authors:
- "@GCJMackenzie"
+maintainers:
+ - "@GCJMackenzie"
diff --git a/modules/nf-core/gatk4/genotypegvcfs/environment.yml b/modules/nf-core/gatk4/genotypegvcfs/environment.yml
new file mode 100644
index 0000000000..49f213790d
--- /dev/null
+++ b/modules/nf-core/gatk4/genotypegvcfs/environment.yml
@@ -0,0 +1,7 @@
+name: gatk4_genotypegvcfs
+channels:
+ - conda-forge
+ - bioconda
+ - defaults
+dependencies:
+ - bioconda::gatk4=4.4.0.0
diff --git a/modules/nf-core/gatk4/genotypegvcfs/main.nf b/modules/nf-core/gatk4/genotypegvcfs/main.nf
index a3e3129fcf..c6c0ba501d 100644
--- a/modules/nf-core/gatk4/genotypegvcfs/main.nf
+++ b/modules/nf-core/gatk4/genotypegvcfs/main.nf
@@ -2,7 +2,7 @@ process GATK4_GENOTYPEGVCFS {
tag "$meta.id"
label 'process_high'
- conda "bioconda::gatk4=4.4.0.0"
+ conda "${moduleDir}/environment.yml"
container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
'https://depot.galaxyproject.org/singularity/gatk4:4.4.0.0--py36hdfd78af_0':
'biocontainers/gatk4:4.4.0.0--py36hdfd78af_0' }"
diff --git a/modules/nf-core/gatk4/genotypegvcfs/meta.yml b/modules/nf-core/gatk4/genotypegvcfs/meta.yml
index d759270d6a..8f1e377eb9 100644
--- a/modules/nf-core/gatk4/genotypegvcfs/meta.yml
+++ b/modules/nf-core/gatk4/genotypegvcfs/meta.yml
@@ -14,7 +14,6 @@ tools:
tool_dev_url: https://github.com/broadinstitute/gatk
doi: "10.1158/1538-7445.AM2017-3590"
licence: ["BSD-3-clause"]
-
input:
- meta:
type: map
@@ -57,7 +56,6 @@ input:
type: file
description: dbSNP VCF index file
pattern: "*.tbi"
-
output:
- meta:
type: map
@@ -76,7 +74,9 @@ output:
type: file
description: File containing software versions
pattern: "versions.yml"
-
authors:
- "@santiagorevale"
- "@maxulysse"
+maintainers:
+ - "@santiagorevale"
+ - "@maxulysse"
diff --git a/modules/nf-core/gatk4/getpileupsummaries/environment.yml b/modules/nf-core/gatk4/getpileupsummaries/environment.yml
new file mode 100644
index 0000000000..d650467cf8
--- /dev/null
+++ b/modules/nf-core/gatk4/getpileupsummaries/environment.yml
@@ -0,0 +1,7 @@
+name: gatk4_getpileupsummaries
+channels:
+ - conda-forge
+ - bioconda
+ - defaults
+dependencies:
+ - bioconda::gatk4=4.4.0.0
diff --git a/modules/nf-core/gatk4/getpileupsummaries/main.nf b/modules/nf-core/gatk4/getpileupsummaries/main.nf
index f7d0f2942c..d509cdf3bb 100644
--- a/modules/nf-core/gatk4/getpileupsummaries/main.nf
+++ b/modules/nf-core/gatk4/getpileupsummaries/main.nf
@@ -2,7 +2,7 @@ process GATK4_GETPILEUPSUMMARIES {
tag "$meta.id"
label 'process_low'
- conda "bioconda::gatk4=4.4.0.0"
+ conda "${moduleDir}/environment.yml"
container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
'https://depot.galaxyproject.org/singularity/gatk4:4.4.0.0--py36hdfd78af_0':
'biocontainers/gatk4:4.4.0.0--py36hdfd78af_0' }"
diff --git a/modules/nf-core/gatk4/getpileupsummaries/meta.yml b/modules/nf-core/gatk4/getpileupsummaries/meta.yml
index 6aaa4b9e7c..fab3c1435e 100644
--- a/modules/nf-core/gatk4/getpileupsummaries/meta.yml
+++ b/modules/nf-core/gatk4/getpileupsummaries/meta.yml
@@ -16,7 +16,6 @@ tools:
documentation: https://gatk.broadinstitute.org/hc/en-us/categories/360002369672s
doi: 10.1158/1538-7445.AM2017-3590
licence: ["Apache-2.0"]
-
input:
- meta:
type: map
@@ -70,7 +69,6 @@ input:
type: file
description: Index file for the germline resource.
pattern: "*.vcf.gz.tbi"
-
output:
- pileup:
type: file
@@ -80,6 +78,7 @@ output:
type: file
description: File containing software versions
pattern: "versions.yml"
-
authors:
- "@GCJMackenzie"
+maintainers:
+ - "@GCJMackenzie"
diff --git a/modules/nf-core/gatk4/haplotypecaller/environment.yml b/modules/nf-core/gatk4/haplotypecaller/environment.yml
new file mode 100644
index 0000000000..0c8f32fa63
--- /dev/null
+++ b/modules/nf-core/gatk4/haplotypecaller/environment.yml
@@ -0,0 +1,7 @@
+name: gatk4_haplotypecaller
+channels:
+ - conda-forge
+ - bioconda
+ - defaults
+dependencies:
+ - bioconda::gatk4=4.4.0.0
diff --git a/modules/nf-core/gatk4/haplotypecaller/main.nf b/modules/nf-core/gatk4/haplotypecaller/main.nf
index 9ac87518e9..fdecf5f830 100644
--- a/modules/nf-core/gatk4/haplotypecaller/main.nf
+++ b/modules/nf-core/gatk4/haplotypecaller/main.nf
@@ -2,7 +2,7 @@ process GATK4_HAPLOTYPECALLER {
tag "$meta.id"
label 'process_medium'
- conda "bioconda::gatk4=4.4.0.0"
+ conda "${moduleDir}/environment.yml"
container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
'https://depot.galaxyproject.org/singularity/gatk4:4.4.0.0--py36hdfd78af_0':
'biocontainers/gatk4:4.4.0.0--py36hdfd78af_0' }"
diff --git a/modules/nf-core/gatk4/haplotypecaller/meta.yml b/modules/nf-core/gatk4/haplotypecaller/meta.yml
index 03afe29427..f38dc37dd0 100644
--- a/modules/nf-core/gatk4/haplotypecaller/meta.yml
+++ b/modules/nf-core/gatk4/haplotypecaller/meta.yml
@@ -14,7 +14,6 @@ tools:
documentation: https://gatk.broadinstitute.org/hc/en-us/categories/360002369672s
doi: 10.1158/1538-7445.AM2017-3590
licence: ["Apache-2.0"]
-
input:
- meta:
type: map
@@ -54,7 +53,6 @@ input:
- dbsnp_tbi:
type: file
description: VCF index of dbsnp (optional)
-
output:
- meta:
type: map
@@ -77,7 +75,9 @@ output:
type: file
description: Assembled haplotypes and locally realigned reads
pattern: "*.realigned.bam"
-
authors:
- "@suzannejin"
- "@FriederikeHanssen"
+maintainers:
+ - "@suzannejin"
+ - "@FriederikeHanssen"
diff --git a/modules/nf-core/gatk4/intervallisttobed/environment.yml b/modules/nf-core/gatk4/intervallisttobed/environment.yml
new file mode 100644
index 0000000000..06d9f0e9c7
--- /dev/null
+++ b/modules/nf-core/gatk4/intervallisttobed/environment.yml
@@ -0,0 +1,7 @@
+name: gatk4_intervallisttobed
+channels:
+ - conda-forge
+ - bioconda
+ - defaults
+dependencies:
+ - bioconda::gatk4=4.4.0.0
diff --git a/modules/nf-core/gatk4/intervallisttobed/main.nf b/modules/nf-core/gatk4/intervallisttobed/main.nf
index 2537f0aa68..89772081e0 100644
--- a/modules/nf-core/gatk4/intervallisttobed/main.nf
+++ b/modules/nf-core/gatk4/intervallisttobed/main.nf
@@ -2,7 +2,7 @@ process GATK4_INTERVALLISTTOBED {
tag "$meta.id"
label 'process_low'
- conda "bioconda::gatk4=4.4.0.0"
+ conda "${moduleDir}/environment.yml"
container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
'https://depot.galaxyproject.org/singularity/gatk4:4.4.0.0--py36hdfd78af_0':
'biocontainers/gatk4:4.4.0.0--py36hdfd78af_0' }"
diff --git a/modules/nf-core/gatk4/intervallisttobed/meta.yml b/modules/nf-core/gatk4/intervallisttobed/meta.yml
index df3705aa15..28d264dfef 100644
--- a/modules/nf-core/gatk4/intervallisttobed/meta.yml
+++ b/modules/nf-core/gatk4/intervallisttobed/meta.yml
@@ -13,7 +13,6 @@ tools:
tool_dev_url: https://github.com/broadinstitute/gatk
doi: "10.1158/1538-7445.AM2017-3590"
licence: ["BSD-3-clause"]
-
input:
- meta:
type: map
@@ -24,7 +23,6 @@ input:
type: file
description: Interval list
pattern: "*.{interval,interval_list}"
-
output:
- meta:
type: map
@@ -39,6 +37,7 @@ output:
type: file
description: File containing software versions
pattern: "versions.yml"
-
authors:
- "@FriederikeHanssen"
+maintainers:
+ - "@FriederikeHanssen"
diff --git a/modules/nf-core/gatk4/learnreadorientationmodel/environment.yml b/modules/nf-core/gatk4/learnreadorientationmodel/environment.yml
new file mode 100644
index 0000000000..d1c35caf83
--- /dev/null
+++ b/modules/nf-core/gatk4/learnreadorientationmodel/environment.yml
@@ -0,0 +1,7 @@
+name: gatk4_learnreadorientationmodel
+channels:
+ - conda-forge
+ - bioconda
+ - defaults
+dependencies:
+ - bioconda::gatk4=4.4.0.0
diff --git a/modules/nf-core/gatk4/learnreadorientationmodel/main.nf b/modules/nf-core/gatk4/learnreadorientationmodel/main.nf
index 89a6ae77b6..c4e39db74a 100644
--- a/modules/nf-core/gatk4/learnreadorientationmodel/main.nf
+++ b/modules/nf-core/gatk4/learnreadorientationmodel/main.nf
@@ -2,7 +2,7 @@ process GATK4_LEARNREADORIENTATIONMODEL {
tag "$meta.id"
label 'process_low'
- conda "bioconda::gatk4=4.4.0.0"
+ conda "${moduleDir}/environment.yml"
container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
'https://depot.galaxyproject.org/singularity/gatk4:4.4.0.0--py36hdfd78af_0':
'biocontainers/gatk4:4.4.0.0--py36hdfd78af_0' }"
diff --git a/modules/nf-core/gatk4/learnreadorientationmodel/meta.yml b/modules/nf-core/gatk4/learnreadorientationmodel/meta.yml
index e29fee2251..4b73a51adb 100644
--- a/modules/nf-core/gatk4/learnreadorientationmodel/meta.yml
+++ b/modules/nf-core/gatk4/learnreadorientationmodel/meta.yml
@@ -16,7 +16,6 @@ tools:
documentation: https://gatk.broadinstitute.org/hc/en-us/categories/360002369672s
doi: 10.1158/1538-7445.AM2017-3590
licence: ["Apache-2.0"]
-
input:
- meta:
type: map
@@ -27,7 +26,6 @@ input:
type: list
description: list of f1r2 files to be used as input.
pattern: "*.f1r2.tar.gz"
-
output:
- artifactprior:
type: file
@@ -37,6 +35,7 @@ output:
type: file
description: File containing software versions
pattern: "versions.yml"
-
authors:
- "@GCJMackenzie"
+maintainers:
+ - "@GCJMackenzie"
diff --git a/modules/nf-core/gatk4/markduplicates/environment.yml b/modules/nf-core/gatk4/markduplicates/environment.yml
new file mode 100644
index 0000000000..9adad104d8
--- /dev/null
+++ b/modules/nf-core/gatk4/markduplicates/environment.yml
@@ -0,0 +1,8 @@
+name: gatk4_markduplicates
+channels:
+ - conda-forge
+ - bioconda
+ - defaults
+dependencies:
+ - bioconda::gatk4=4.4.0.0
+ - bioconda::samtools=1.17
diff --git a/modules/nf-core/gatk4/markduplicates/main.nf b/modules/nf-core/gatk4/markduplicates/main.nf
index e4c01f9a2e..564b86d3dd 100644
--- a/modules/nf-core/gatk4/markduplicates/main.nf
+++ b/modules/nf-core/gatk4/markduplicates/main.nf
@@ -2,7 +2,7 @@ process GATK4_MARKDUPLICATES {
tag "$meta.id"
label 'process_medium'
- conda "bioconda::gatk4=4.4.0.0 bioconda::samtools=1.17"
+ conda "${moduleDir}/environment.yml"
container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
'https://depot.galaxyproject.org/singularity/mulled-v2-d9e7bad0f7fbc8f4458d5c3ab7ffaaf0235b59fb:f857e2d6cc88d35580d01cf39e0959a68b83c1d9-0':
'biocontainers/mulled-v2-d9e7bad0f7fbc8f4458d5c3ab7ffaaf0235b59fb:f857e2d6cc88d35580d01cf39e0959a68b83c1d9-0' }"
@@ -65,4 +65,21 @@ process GATK4_MARKDUPLICATES {
samtools: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//')
END_VERSIONS
"""
+
+ stub:
+ prefix = task.ext.prefix ?: "${meta.id}.bam"
+ prefix_no_suffix = task.ext.prefix ? prefix.tokenize('.')[0] : "${meta.id}"
+ """
+ touch ${prefix_no_suffix}.bam
+ touch ${prefix_no_suffix}.cram
+ touch ${prefix_no_suffix}.cram.crai
+ touch ${prefix_no_suffix}.bai
+ touch ${prefix}.metrics
+
+ cat <<-END_VERSIONS > versions.yml
+ "${task.process}":
+ gatk4: \$(echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//')
+ samtools: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//')
+ END_VERSIONS
+ """
}
diff --git a/modules/nf-core/gatk4/markduplicates/meta.yml b/modules/nf-core/gatk4/markduplicates/meta.yml
index d3e755054a..b0f09d4b84 100644
--- a/modules/nf-core/gatk4/markduplicates/meta.yml
+++ b/modules/nf-core/gatk4/markduplicates/meta.yml
@@ -7,16 +7,12 @@ keywords:
- sort
tools:
- gatk4:
- description:
- Developed in the Data Sciences Platform at the Broad Institute, the toolkit offers a wide variety of tools
- with a primary focus on variant discovery and genotyping. Its powerful processing engine
- and high-performance computing features make it capable of taking on projects of any size.
+ description: Developed in the Data Sciences Platform at the Broad Institute, the toolkit offers a wide variety of tools with a primary focus on variant discovery and genotyping. Its powerful processing engine and high-performance computing features make it capable of taking on projects of any size.
homepage: https://gatk.broadinstitute.org/hc/en-us
documentation: https://gatk.broadinstitute.org/hc/en-us/articles/360037052812-MarkDuplicates-Picard-
tool_dev_url: https://github.com/broadinstitute/gatk
doi: 10.1158/1538-7445.AM2017-3590
licence: ["MIT"]
-
input:
- meta:
type: map
@@ -35,7 +31,6 @@ input:
type: file
description: Fasta index file
pattern: "*.{fai}"
-
output:
- meta:
type: map
@@ -66,8 +61,11 @@ output:
type: file
description: Duplicate metrics file generated by GATK
pattern: "*.{metrics.txt}"
-
authors:
- "@ajodeh-juma"
- "@FriederikeHanssen"
- "@maxulysse"
+maintainers:
+ - "@ajodeh-juma"
+ - "@FriederikeHanssen"
+ - "@maxulysse"
diff --git a/modules/nf-core/gatk4/mergemutectstats/environment.yml b/modules/nf-core/gatk4/mergemutectstats/environment.yml
new file mode 100644
index 0000000000..dd132c3a3d
--- /dev/null
+++ b/modules/nf-core/gatk4/mergemutectstats/environment.yml
@@ -0,0 +1,7 @@
+name: gatk4_mergemutectstats
+channels:
+ - conda-forge
+ - bioconda
+ - defaults
+dependencies:
+ - bioconda::gatk4=4.4.0.0
diff --git a/modules/nf-core/gatk4/mergemutectstats/main.nf b/modules/nf-core/gatk4/mergemutectstats/main.nf
index 269721cbf9..3a4913220c 100644
--- a/modules/nf-core/gatk4/mergemutectstats/main.nf
+++ b/modules/nf-core/gatk4/mergemutectstats/main.nf
@@ -2,7 +2,7 @@ process GATK4_MERGEMUTECTSTATS {
tag "$meta.id"
label 'process_low'
- conda "bioconda::gatk4=4.4.0.0"
+ conda "${moduleDir}/environment.yml"
container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
'https://depot.galaxyproject.org/singularity/gatk4:4.4.0.0--py36hdfd78af_0':
'biocontainers/gatk4:4.4.0.0--py36hdfd78af_0' }"
diff --git a/modules/nf-core/gatk4/mergemutectstats/meta.yml b/modules/nf-core/gatk4/mergemutectstats/meta.yml
index f75833c9bc..1269525657 100644
--- a/modules/nf-core/gatk4/mergemutectstats/meta.yml
+++ b/modules/nf-core/gatk4/mergemutectstats/meta.yml
@@ -13,7 +13,6 @@ tools:
tool_dev_url: https://github.com/broadinstitute/gatk
doi: "10.1158/1538-7445.AM2017-3590"
licence: ["BSD-3-clause"]
-
input:
- meta:
type: map
@@ -24,7 +23,6 @@ input:
type: file
description: Stats file
pattern: "*.{stats}"
-
output:
- meta:
type: map
@@ -39,6 +37,7 @@ output:
type: file
description: Stats file
pattern: "*.vcf.gz.stats"
-
authors:
- "@FriederikeHanssen"
+maintainers:
+ - "@FriederikeHanssen"
diff --git a/modules/nf-core/gatk4/mergevcfs/environment.yml b/modules/nf-core/gatk4/mergevcfs/environment.yml
new file mode 100644
index 0000000000..d6c3e51a9f
--- /dev/null
+++ b/modules/nf-core/gatk4/mergevcfs/environment.yml
@@ -0,0 +1,7 @@
+name: gatk4_mergevcfs
+channels:
+ - conda-forge
+ - bioconda
+ - defaults
+dependencies:
+ - bioconda::gatk4=4.4.0.0
diff --git a/modules/nf-core/gatk4/mergevcfs/main.nf b/modules/nf-core/gatk4/mergevcfs/main.nf
index 29c08e169c..3362c2bdad 100644
--- a/modules/nf-core/gatk4/mergevcfs/main.nf
+++ b/modules/nf-core/gatk4/mergevcfs/main.nf
@@ -2,7 +2,7 @@ process GATK4_MERGEVCFS {
tag "$meta.id"
label 'process_medium'
- conda "bioconda::gatk4=4.4.0.0"
+ conda "${moduleDir}/environment.yml"
container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
'https://depot.galaxyproject.org/singularity/gatk4:4.4.0.0--py36hdfd78af_0':
'biocontainers/gatk4:4.4.0.0--py36hdfd78af_0' }"
diff --git a/modules/nf-core/gatk4/mergevcfs/meta.yml b/modules/nf-core/gatk4/mergevcfs/meta.yml
index 4e11311c17..30290a854f 100644
--- a/modules/nf-core/gatk4/mergevcfs/meta.yml
+++ b/modules/nf-core/gatk4/mergevcfs/meta.yml
@@ -33,7 +33,6 @@ input:
type: file
description: Optional Sequence Dictionary as input
pattern: "*.dict"
-
output:
- vcf:
type: file
@@ -43,10 +42,11 @@ output:
type: file
description: index files for the merged vcf files
pattern: "*.tbi"
-
- versions:
type: file
description: File containing software versions
pattern: "versions.yml"
authors:
- "@kevinmenden"
+maintainers:
+ - "@kevinmenden"
diff --git a/modules/nf-core/gatk4/mutect2/environment.yml b/modules/nf-core/gatk4/mutect2/environment.yml
new file mode 100644
index 0000000000..54da66ce57
--- /dev/null
+++ b/modules/nf-core/gatk4/mutect2/environment.yml
@@ -0,0 +1,7 @@
+name: gatk4_mutect2
+channels:
+ - conda-forge
+ - bioconda
+ - defaults
+dependencies:
+ - bioconda::gatk4=4.4.0.0
diff --git a/modules/nf-core/gatk4/mutect2/main.nf b/modules/nf-core/gatk4/mutect2/main.nf
index 4e353979db..721e94f3e4 100644
--- a/modules/nf-core/gatk4/mutect2/main.nf
+++ b/modules/nf-core/gatk4/mutect2/main.nf
@@ -2,7 +2,7 @@ process GATK4_MUTECT2 {
tag "$meta.id"
label 'process_medium'
- conda "bioconda::gatk4=4.4.0.0"
+ conda "${moduleDir}/environment.yml"
container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
'https://depot.galaxyproject.org/singularity/gatk4:4.4.0.0--py36hdfd78af_0':
'biocontainers/gatk4:4.4.0.0--py36hdfd78af_0' }"
diff --git a/modules/nf-core/gatk4/mutect2/meta.yml b/modules/nf-core/gatk4/mutect2/meta.yml
index 693aeb296a..21c928ed96 100644
--- a/modules/nf-core/gatk4/mutect2/meta.yml
+++ b/modules/nf-core/gatk4/mutect2/meta.yml
@@ -17,7 +17,6 @@ tools:
documentation: https://gatk.broadinstitute.org/hc/en-us/categories/360002369672s
doi: 10.1158/1538-7445.AM2017-3590
licence: ["Apache-2.0"]
-
input:
- meta:
type: map
@@ -79,7 +78,6 @@ input:
type: file
description: Index for the panel of normals.
pattern: "*.vcf.gz.tbi"
-
output:
- vcf:
type: file
@@ -101,7 +99,9 @@ output:
type: file
description: File containing software versions
pattern: "versions.yml"
-
authors:
- "@GCJMackenzie"
- "@ramprasadn"
+maintainers:
+ - "@GCJMackenzie"
+ - "@ramprasadn"
diff --git a/modules/nf-core/gatk4/variantrecalibrator/environment.yml b/modules/nf-core/gatk4/variantrecalibrator/environment.yml
new file mode 100644
index 0000000000..619208a56d
--- /dev/null
+++ b/modules/nf-core/gatk4/variantrecalibrator/environment.yml
@@ -0,0 +1,7 @@
+name: gatk4_variantrecalibrator
+channels:
+ - conda-forge
+ - bioconda
+ - defaults
+dependencies:
+ - bioconda::gatk4=4.4.0.0
diff --git a/modules/nf-core/gatk4/variantrecalibrator/main.nf b/modules/nf-core/gatk4/variantrecalibrator/main.nf
index fa262e4a95..f9cd45ac94 100644
--- a/modules/nf-core/gatk4/variantrecalibrator/main.nf
+++ b/modules/nf-core/gatk4/variantrecalibrator/main.nf
@@ -2,7 +2,7 @@ process GATK4_VARIANTRECALIBRATOR {
tag "$meta.id"
label 'process_low'
- conda "bioconda::gatk4=4.4.0.0"
+ conda "${moduleDir}/environment.yml"
container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
'https://depot.galaxyproject.org/singularity/gatk4:4.4.0.0--py36hdfd78af_0':
'biocontainers/gatk4:4.4.0.0--py36hdfd78af_0' }"
diff --git a/modules/nf-core/gatk4/variantrecalibrator/meta.yml b/modules/nf-core/gatk4/variantrecalibrator/meta.yml
index 5ade4d89b1..39a415b61c 100644
--- a/modules/nf-core/gatk4/variantrecalibrator/meta.yml
+++ b/modules/nf-core/gatk4/variantrecalibrator/meta.yml
@@ -18,7 +18,6 @@ tools:
homepage: https://gatk.broadinstitute.org/hc/en-us
documentation: https://gatk.broadinstitute.org/hc/en-us/categories/360002369672s
doi: 10.1158/1538-7445.AM2017-3590
-
input:
- meta:
type: map
@@ -80,3 +79,6 @@ output:
authors:
- "@GCJMackenzie"
- "@nickhsmith"
+maintainers:
+ - "@GCJMackenzie"
+ - "@nickhsmith"
diff --git a/modules/nf-core/gatk4spark/applybqsr/environment.yml b/modules/nf-core/gatk4spark/applybqsr/environment.yml
new file mode 100644
index 0000000000..709dd488c9
--- /dev/null
+++ b/modules/nf-core/gatk4spark/applybqsr/environment.yml
@@ -0,0 +1,7 @@
+name: gatk4spark_applybqsr
+channels:
+ - conda-forge
+ - bioconda
+ - defaults
+dependencies:
+ - bioconda::gatk4-spark=4.4.0.0
diff --git a/modules/nf-core/gatk4/applybqsrspark/main.nf b/modules/nf-core/gatk4spark/applybqsr/main.nf
similarity index 81%
rename from modules/nf-core/gatk4/applybqsrspark/main.nf
rename to modules/nf-core/gatk4spark/applybqsr/main.nf
index 7a4c29bbca..170dbeeafd 100644
--- a/modules/nf-core/gatk4/applybqsrspark/main.nf
+++ b/modules/nf-core/gatk4spark/applybqsr/main.nf
@@ -1,9 +1,11 @@
-process GATK4_APPLYBQSR_SPARK {
+process GATK4SPARK_APPLYBQSR {
tag "$meta.id"
label 'process_low'
- conda "bioconda::gatk4=4.3.0.0 conda-forge::openjdk=8.0.312"
- container "nf-core/gatk:4.4.0.0"
+ conda "${moduleDir}/environment.yml"
+ container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
+ 'https://depot.galaxyproject.org/singularity/gatk4-spark:4.4.0.0--hdfd78af_0':
+ 'biocontainers/gatk4-spark:4.4.0.0--hdfd78af_0' }"
input:
tuple val(meta), path(input), path(input_index), path(bqsr_table), path(intervals)
diff --git a/modules/nf-core/gatk4/applybqsrspark/meta.yml b/modules/nf-core/gatk4spark/applybqsr/meta.yml
similarity index 94%
rename from modules/nf-core/gatk4/applybqsrspark/meta.yml
rename to modules/nf-core/gatk4spark/applybqsr/meta.yml
index b253fc78ce..4904568d2e 100644
--- a/modules/nf-core/gatk4/applybqsrspark/meta.yml
+++ b/modules/nf-core/gatk4spark/applybqsr/meta.yml
@@ -1,12 +1,11 @@
-name: gatk4_applybqsr_spark
+name: gatk4spark_applybqsr
description: Apply base quality score recalibration (BQSR) to a bam file
keywords:
- bam
- base quality score recalibration
- bqsr
- cram
- - gatk4
- - spark
+ - gatk4spark
tools:
- gatk4:
description: |
@@ -17,7 +16,6 @@ tools:
documentation: https://gatk.broadinstitute.org/hc/en-us/categories/360002369672s
doi: 10.1158/1538-7445.AM2017-3590
licence: ["Apache-2.0"]
-
input:
- meta:
type: map
@@ -50,7 +48,6 @@ input:
type: file
description: GATK sequence dictionary
pattern: "*.dict"
-
output:
- meta:
type: map
@@ -69,8 +66,11 @@ output:
type: file
description: Recalibrated CRAM file
pattern: "*.{cram}"
-
authors:
- "@yocra3"
- "@FriederikeHanssen"
- "@maxulysse"
+maintainers:
+ - "@yocra3"
+ - "@FriederikeHanssen"
+ - "@maxulysse"
diff --git a/modules/nf-core/gatk4spark/baserecalibrator/environment.yml b/modules/nf-core/gatk4spark/baserecalibrator/environment.yml
new file mode 100644
index 0000000000..bf2568a2a8
--- /dev/null
+++ b/modules/nf-core/gatk4spark/baserecalibrator/environment.yml
@@ -0,0 +1,7 @@
+name: gatk4spark_baserecalibrator
+channels:
+ - conda-forge
+ - bioconda
+ - defaults
+dependencies:
+ - bioconda::gatk4-spark=4.4.0.0
diff --git a/modules/nf-core/gatk4/baserecalibratorspark/main.nf b/modules/nf-core/gatk4spark/baserecalibrator/main.nf
similarity index 80%
rename from modules/nf-core/gatk4/baserecalibratorspark/main.nf
rename to modules/nf-core/gatk4spark/baserecalibrator/main.nf
index 6db088bb75..ee44bf7d66 100644
--- a/modules/nf-core/gatk4/baserecalibratorspark/main.nf
+++ b/modules/nf-core/gatk4spark/baserecalibrator/main.nf
@@ -1,9 +1,11 @@
-process GATK4_BASERECALIBRATOR_SPARK {
+process GATK4SPARK_BASERECALIBRATOR {
tag "$meta.id"
label 'process_low'
- conda "bioconda::gatk4=4.4.0.0 conda-forge::openjdk=8.0.312"
- container "nf-core/gatk:4.4.0.0"
+ conda "${moduleDir}/environment.yml"
+ container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
+ 'https://depot.galaxyproject.org/singularity/gatk4-spark:4.4.0.0--hdfd78af_0':
+ 'biocontainers/gatk4-spark:4.4.0.0--hdfd78af_0' }"
input:
tuple val(meta), path(input), path(input_index), path(intervals)
diff --git a/modules/nf-core/gatk4/baserecalibratorspark/meta.yml b/modules/nf-core/gatk4spark/baserecalibrator/meta.yml
similarity index 94%
rename from modules/nf-core/gatk4/baserecalibratorspark/meta.yml
rename to modules/nf-core/gatk4spark/baserecalibrator/meta.yml
index d175ca13a1..dd334a225f 100644
--- a/modules/nf-core/gatk4/baserecalibratorspark/meta.yml
+++ b/modules/nf-core/gatk4spark/baserecalibrator/meta.yml
@@ -1,12 +1,11 @@
-name: gatk4_baserecalibrator_spark
+name: gatk4spark_baserecalibrator
description: Generate recalibration table for Base Quality Score Recalibration (BQSR)
keywords:
- base quality score recalibration
- table
- bqsr
- - gatk4
+ - gatk4spark
- sort
- - spark
tools:
- gatk4:
description: |
@@ -17,7 +16,6 @@ tools:
documentation: https://gatk.broadinstitute.org/hc/en-us/categories/360002369672s
doi: 10.1158/1538-7445.AM2017-3590
licence: ["Apache-2.0"]
-
input:
- meta:
type: map
@@ -55,7 +53,6 @@ input:
type: file
description: Tabix index of the known_sites (optional)
pattern: "*.vcf.gz.tbi"
-
output:
- meta:
type: map
@@ -70,8 +67,11 @@ output:
type: file
description: Recalibration table from BaseRecalibrator
pattern: "*.{table}"
-
authors:
- "@yocra3"
- "@FriederikeHanssen"
- "@maxulysse"
+maintainers:
+ - "@yocra3"
+ - "@FriederikeHanssen"
+ - "@maxulysse"
diff --git a/modules/nf-core/gatk4spark/markduplicates/environment.yml b/modules/nf-core/gatk4spark/markduplicates/environment.yml
new file mode 100644
index 0000000000..3e33d7fe3d
--- /dev/null
+++ b/modules/nf-core/gatk4spark/markduplicates/environment.yml
@@ -0,0 +1,7 @@
+name: gatk4spark_markduplicates
+channels:
+ - conda-forge
+ - bioconda
+ - defaults
+dependencies:
+ - bioconda::gatk4-spark=4.4.0.0
diff --git a/modules/nf-core/gatk4/markduplicatesspark/main.nf b/modules/nf-core/gatk4spark/markduplicates/main.nf
similarity index 81%
rename from modules/nf-core/gatk4/markduplicatesspark/main.nf
rename to modules/nf-core/gatk4spark/markduplicates/main.nf
index f318ed50a6..61e295c839 100644
--- a/modules/nf-core/gatk4/markduplicatesspark/main.nf
+++ b/modules/nf-core/gatk4spark/markduplicates/main.nf
@@ -1,9 +1,11 @@
-process GATK4_MARKDUPLICATES_SPARK {
+process GATK4SPARK_MARKDUPLICATES {
tag "$meta.id"
label 'process_high'
- conda "bioconda::gatk4=4.4.0.0 conda-forge::openjdk=8.0.312"
- container "nf-core/gatk:4.4.0.0"
+ conda "${moduleDir}/environment.yml"
+ container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
+ 'https://depot.galaxyproject.org/singularity/gatk4-spark:4.4.0.0--hdfd78af_0':
+ 'biocontainers/gatk4-spark:4.4.0.0--hdfd78af_0' }"
input:
tuple val(meta), path(bam)
diff --git a/modules/nf-core/gatk4/markduplicatesspark/meta.yml b/modules/nf-core/gatk4spark/markduplicates/meta.yml
similarity index 78%
rename from modules/nf-core/gatk4/markduplicatesspark/meta.yml
rename to modules/nf-core/gatk4spark/markduplicates/meta.yml
index c9bb263a96..016a215b25 100644
--- a/modules/nf-core/gatk4/markduplicatesspark/meta.yml
+++ b/modules/nf-core/gatk4spark/markduplicates/meta.yml
@@ -1,23 +1,18 @@
-name: gatk4_markduplicates_spark
+name: gatk4spark_markduplicates
description: This tool locates and tags duplicate reads in a BAM or SAM file, where duplicate reads are defined as originating from a single fragment of DNA.
keywords:
- bam
- - gatk4
+ - gatk4spark
- markduplicates
- sort
- - spark
tools:
- gatk4:
- description:
- Developed in the Data Sciences Platform at the Broad Institute, the toolkit offers a wide variety of tools
- with a primary focus on variant discovery and genotyping. Its powerful processing engine
- and high-performance computing features make it capable of taking on projects of any size.
+ description: Developed in the Data Sciences Platform at the Broad Institute, the toolkit offers a wide variety of tools with a primary focus on variant discovery and genotyping. Its powerful processing engine and high-performance computing features make it capable of taking on projects of any size.
homepage: https://gatk.broadinstitute.org/hc/en-us
documentation: https://gatk.broadinstitute.org/hc/en-us/articles/360037052812-MarkDuplicates-Picard-
tool_dev_url: https://github.com/broadinstitute/gatk
doi: 10.1158/1538-7445.AM2017-3590
licence: ["MIT"]
-
input:
- meta:
type: map
@@ -40,7 +35,6 @@ input:
type: file
description: GATK sequence dictionary
pattern: "*.dict"
-
output:
- meta:
type: map
@@ -59,9 +53,13 @@ output:
type: file
description: Optional BAM index file
pattern: "*.bai"
-
authors:
- "@ajodeh-juma"
- "@FriederikeHanssen"
- "@maxulysse"
- "@SusiJo"
+maintainers:
+ - "@ajodeh-juma"
+ - "@FriederikeHanssen"
+ - "@maxulysse"
+ - "@SusiJo"
diff --git a/modules/nf-core/manta/germline/environment.yml b/modules/nf-core/manta/germline/environment.yml
new file mode 100644
index 0000000000..4a63d3084b
--- /dev/null
+++ b/modules/nf-core/manta/germline/environment.yml
@@ -0,0 +1,7 @@
+name: manta_germline
+channels:
+ - conda-forge
+ - bioconda
+ - defaults
+dependencies:
+ - bioconda::manta=1.6.0
diff --git a/modules/nf-core/manta/germline/main.nf b/modules/nf-core/manta/germline/main.nf
index e052b7c9f0..5d5666c6e5 100644
--- a/modules/nf-core/manta/germline/main.nf
+++ b/modules/nf-core/manta/germline/main.nf
@@ -3,7 +3,7 @@ process MANTA_GERMLINE {
label 'process_medium'
label 'error_retry'
- conda "bioconda::manta=1.6.0"
+ conda "${moduleDir}/environment.yml"
container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
'https://depot.galaxyproject.org/singularity/manta:1.6.0--h9ee0642_1' :
'biocontainers/manta:1.6.0--h9ee0642_1' }"
@@ -13,6 +13,7 @@ process MANTA_GERMLINE {
tuple val(meta), path(input), path(index), path(target_bed), path(target_bed_tbi)
tuple val(meta2), path(fasta)
tuple val(meta3), path(fai)
+ path(config)
output:
tuple val(meta), path("*candidate_small_indels.vcf.gz") , emit: candidate_small_indels_vcf
@@ -31,27 +32,29 @@ process MANTA_GERMLINE {
def prefix = task.ext.prefix ?: "${meta.id}"
def input_files = input.collect{"--bam ${it}"}.join(' ')
def options_manta = target_bed ? "--callRegions $target_bed" : ""
+ def config_option = config ? "--config ${config}" : ""
"""
- configManta.py \
- ${input_files} \
- --reference $fasta \
- --runDir manta \
- $options_manta \
+ configManta.py \\
+ ${input_files} \\
+ ${config_option} \\
+ --reference $fasta \\
+ --runDir manta \\
+ $options_manta \\
$args
python manta/runWorkflow.py -m local -j $task.cpus
- mv manta/results/variants/candidateSmallIndels.vcf.gz \
+ mv manta/results/variants/candidateSmallIndels.vcf.gz \\
${prefix}.candidate_small_indels.vcf.gz
- mv manta/results/variants/candidateSmallIndels.vcf.gz.tbi \
+ mv manta/results/variants/candidateSmallIndels.vcf.gz.tbi \\
${prefix}.candidate_small_indels.vcf.gz.tbi
- mv manta/results/variants/candidateSV.vcf.gz \
+ mv manta/results/variants/candidateSV.vcf.gz \\
${prefix}.candidate_sv.vcf.gz
- mv manta/results/variants/candidateSV.vcf.gz.tbi \
+ mv manta/results/variants/candidateSV.vcf.gz.tbi \\
${prefix}.candidate_sv.vcf.gz.tbi
- mv manta/results/variants/diploidSV.vcf.gz \
+ mv manta/results/variants/diploidSV.vcf.gz \\
${prefix}.diploid_sv.vcf.gz
- mv manta/results/variants/diploidSV.vcf.gz.tbi \
+ mv manta/results/variants/diploidSV.vcf.gz.tbi \\
${prefix}.diploid_sv.vcf.gz.tbi
cat <<-END_VERSIONS > versions.yml
diff --git a/modules/nf-core/manta/germline/meta.yml b/modules/nf-core/manta/germline/meta.yml
index 2eb16ada53..72ed15f8bc 100644
--- a/modules/nf-core/manta/germline/meta.yml
+++ b/modules/nf-core/manta/germline/meta.yml
@@ -16,7 +16,6 @@ tools:
tool_dev_url: https://github.com/Illumina/manta
doi: "10.1093/bioinformatics/btv710"
licence: ["GPL v3"]
-
input:
- meta:
type: map
@@ -57,7 +56,10 @@ input:
type: file
description: Genome reference FASTA index file
pattern: "*.{fa.fai,fasta.fai}"
-
+ - config:
+ type: file
+ description: Manta configuration file
+ pattern: "*.{ini,conf,config}"
output:
- meta:
type: map
@@ -92,7 +94,11 @@ output:
type: file
description: File containing software versions
pattern: "versions.yml"
-
authors:
- "@maxulysse"
- "@ramprasadn"
+ - "@nvnieuwk"
+maintainers:
+ - "@maxulysse"
+ - "@ramprasadn"
+ - "@nvnieuwk"
diff --git a/modules/nf-core/manta/somatic/environment.yml b/modules/nf-core/manta/somatic/environment.yml
new file mode 100644
index 0000000000..aac8827dfc
--- /dev/null
+++ b/modules/nf-core/manta/somatic/environment.yml
@@ -0,0 +1,7 @@
+name: manta_somatic
+channels:
+ - conda-forge
+ - bioconda
+ - defaults
+dependencies:
+ - bioconda::manta=1.6.0
diff --git a/modules/nf-core/manta/somatic/main.nf b/modules/nf-core/manta/somatic/main.nf
index 8ff8d90a11..07511b2f0b 100644
--- a/modules/nf-core/manta/somatic/main.nf
+++ b/modules/nf-core/manta/somatic/main.nf
@@ -3,15 +3,16 @@ process MANTA_SOMATIC {
label 'process_medium'
label 'error_retry'
- conda "bioconda::manta=1.6.0"
+ conda "${moduleDir}/environment.yml"
container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
'https://depot.galaxyproject.org/singularity/manta:1.6.0--h9ee0642_1' :
'biocontainers/manta:1.6.0--h9ee0642_1' }"
input:
tuple val(meta), path(input_normal), path(input_index_normal), path(input_tumor), path(input_index_tumor), path(target_bed), path(target_bed_tbi)
- path fasta
- path fai
+ tuple val(meta2), path(fasta)
+ tuple val(meta3), path(fai)
+ path(config)
output:
tuple val(meta), path("*.candidate_small_indels.vcf.gz") , emit: candidate_small_indels_vcf
@@ -31,26 +32,53 @@ process MANTA_SOMATIC {
def args = task.ext.args ?: ''
def prefix = task.ext.prefix ?: "${meta.id}"
def options_manta = target_bed ? "--callRegions $target_bed" : ""
-
+ def config_option = config ? "--config ${config}" : ""
"""
- configManta.py \
- --tumorBam $input_tumor \
- --normalBam $input_normal \
- --reference $fasta \
- --runDir manta \
- $options_manta \
+ configManta.py \\
+ --tumorBam $input_tumor \\
+ --normalBam $input_normal \\
+ --reference $fasta \\
+ ${config_option} \\
+ --runDir manta \\
+ $options_manta \\
$args
python manta/runWorkflow.py -m local -j $task.cpus
- mv manta/results/variants/candidateSmallIndels.vcf.gz ${prefix}.candidate_small_indels.vcf.gz
- mv manta/results/variants/candidateSmallIndels.vcf.gz.tbi ${prefix}.candidate_small_indels.vcf.gz.tbi
- mv manta/results/variants/candidateSV.vcf.gz ${prefix}.candidate_sv.vcf.gz
- mv manta/results/variants/candidateSV.vcf.gz.tbi ${prefix}.candidate_sv.vcf.gz.tbi
- mv manta/results/variants/diploidSV.vcf.gz ${prefix}.diploid_sv.vcf.gz
- mv manta/results/variants/diploidSV.vcf.gz.tbi ${prefix}.diploid_sv.vcf.gz.tbi
- mv manta/results/variants/somaticSV.vcf.gz ${prefix}.somatic_sv.vcf.gz
- mv manta/results/variants/somaticSV.vcf.gz.tbi ${prefix}.somatic_sv.vcf.gz.tbi
+ mv manta/results/variants/candidateSmallIndels.vcf.gz \\
+ ${prefix}.candidate_small_indels.vcf.gz
+ mv manta/results/variants/candidateSmallIndels.vcf.gz.tbi \\
+ ${prefix}.candidate_small_indels.vcf.gz.tbi
+ mv manta/results/variants/candidateSV.vcf.gz \\
+ ${prefix}.candidate_sv.vcf.gz
+ mv manta/results/variants/candidateSV.vcf.gz.tbi \\
+ ${prefix}.candidate_sv.vcf.gz.tbi
+ mv manta/results/variants/diploidSV.vcf.gz \\
+ ${prefix}.diploid_sv.vcf.gz
+ mv manta/results/variants/diploidSV.vcf.gz.tbi \\
+ ${prefix}.diploid_sv.vcf.gz.tbi
+ mv manta/results/variants/somaticSV.vcf.gz \\
+ ${prefix}.somatic_sv.vcf.gz
+ mv manta/results/variants/somaticSV.vcf.gz.tbi \\
+ ${prefix}.somatic_sv.vcf.gz.tbi
+
+ cat <<-END_VERSIONS > versions.yml
+ "${task.process}":
+ manta: \$( configManta.py --version )
+ END_VERSIONS
+ """
+
+ stub:
+ def prefix = task.ext.prefix ?: "${meta.id}"
+ """
+ touch ${prefix}.candidate_small_indels.vcf.gz
+ touch ${prefix}.candidate_small_indels.vcf.gz.tbi
+ touch ${prefix}.candidate_sv.vcf.gz
+ touch ${prefix}.candidate_sv.vcf.gz.tbi
+ touch ${prefix}.diploid_sv.vcf.gz
+ touch ${prefix}.diploid_sv.vcf.gz.tbi
+ touch ${prefix}.somatic_sv.vcf.gz
+ touch ${prefix}.somatic_sv.vcf.gz.tbi
cat <<-END_VERSIONS > versions.yml
"${task.process}":
diff --git a/modules/nf-core/manta/somatic/meta.yml b/modules/nf-core/manta/somatic/meta.yml
index 457d66a5fd..e658edaaa4 100644
--- a/modules/nf-core/manta/somatic/meta.yml
+++ b/modules/nf-core/manta/somatic/meta.yml
@@ -16,7 +16,6 @@ tools:
tool_dev_url: https://github.com/Illumina/manta
doi: "10.1093/bioinformatics/btv710"
licence: ["GPL v3"]
-
input:
- meta:
type: map
@@ -47,15 +46,28 @@ input:
type: file
description: Index for BED file containing target regions for variant calling
pattern: "*.{bed.tbi}"
+ - meta2:
+ type: map
+ description: |
+ Groovy Map containing reference information
+ e.g. [ id:'genome' ]
- fasta:
type: file
description: Genome reference FASTA file
pattern: "*.{fa,fasta}"
+ - meta3:
+ type: map
+ description: |
+ Groovy Map containing reference information
+ e.g. [ id:'genome' ]
- fai:
type: file
description: Genome reference FASTA index file
pattern: "*.{fa.fai,fasta.fai}"
-
+ - config:
+ type: file
+ description: Manta configuration file
+ pattern: "*.{ini,conf,config}"
output:
- meta:
type: map
@@ -98,6 +110,9 @@ output:
type: file
description: File containing software versions
pattern: "versions.yml"
-
authors:
- "@FriederikeHanssen"
+ - "@nvnieuwk"
+maintainers:
+ - "@FriederikeHanssen"
+ - "@nvnieuwk"
diff --git a/modules/nf-core/manta/tumoronly/environment.yml b/modules/nf-core/manta/tumoronly/environment.yml
new file mode 100644
index 0000000000..cf5db361e0
--- /dev/null
+++ b/modules/nf-core/manta/tumoronly/environment.yml
@@ -0,0 +1,7 @@
+name: manta_tumoronly
+channels:
+ - conda-forge
+ - bioconda
+ - defaults
+dependencies:
+ - bioconda::manta=1.6.0
diff --git a/modules/nf-core/manta/tumoronly/main.nf b/modules/nf-core/manta/tumoronly/main.nf
index e3d6ca1b14..b047299571 100644
--- a/modules/nf-core/manta/tumoronly/main.nf
+++ b/modules/nf-core/manta/tumoronly/main.nf
@@ -3,15 +3,16 @@ process MANTA_TUMORONLY {
label 'process_medium'
label 'error_retry'
- conda "bioconda::manta=1.6.0"
+ conda "${moduleDir}/environment.yml"
container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
'https://depot.galaxyproject.org/singularity/manta:1.6.0--h9ee0642_1' :
'biocontainers/manta:1.6.0--h9ee0642_1' }"
input:
tuple val(meta), path(input), path(input_index), path(target_bed), path(target_bed_tbi)
- path fasta
- path fai
+ tuple val(meta2), path(fasta)
+ tuple val(meta3), path(fai)
+ path(config)
output:
tuple val(meta), path("*candidate_small_indels.vcf.gz") , emit: candidate_small_indels_vcf
@@ -29,27 +30,29 @@ process MANTA_TUMORONLY {
def args = task.ext.args ?: ''
def prefix = task.ext.prefix ?: "${meta.id}"
def options_manta = target_bed ? "--callRegions $target_bed" : ""
+ def config_option = config ? "--config ${config}" : ""
"""
- configManta.py \
- --tumorBam $input \
- --reference $fasta \
- --runDir manta \
- $options_manta \
+ configManta.py \\
+ --tumorBam $input \\
+ --reference $fasta \\
+ ${config_option} \\
+ --runDir manta \\
+ $options_manta \\
$args
python manta/runWorkflow.py -m local -j $task.cpus
- mv manta/results/variants/candidateSmallIndels.vcf.gz \
+ mv manta/results/variants/candidateSmallIndels.vcf.gz \\
${prefix}.candidate_small_indels.vcf.gz
- mv manta/results/variants/candidateSmallIndels.vcf.gz.tbi \
+ mv manta/results/variants/candidateSmallIndels.vcf.gz.tbi \\
${prefix}.candidate_small_indels.vcf.gz.tbi
- mv manta/results/variants/candidateSV.vcf.gz \
+ mv manta/results/variants/candidateSV.vcf.gz \\
${prefix}.candidate_sv.vcf.gz
- mv manta/results/variants/candidateSV.vcf.gz.tbi \
+ mv manta/results/variants/candidateSV.vcf.gz.tbi \\
${prefix}.candidate_sv.vcf.gz.tbi
- mv manta/results/variants/tumorSV.vcf.gz \
+ mv manta/results/variants/tumorSV.vcf.gz \\
${prefix}.tumor_sv.vcf.gz
- mv manta/results/variants/tumorSV.vcf.gz.tbi \
+ mv manta/results/variants/tumorSV.vcf.gz.tbi \\
${prefix}.tumor_sv.vcf.gz.tbi
cat <<-END_VERSIONS > versions.yml
@@ -57,4 +60,20 @@ process MANTA_TUMORONLY {
manta: \$( configManta.py --version )
END_VERSIONS
"""
+
+ stub:
+ def prefix = task.ext.prefix ?: "${meta.id}"
+ """
+ touch ${prefix}.candidate_small_indels.vcf.gz
+ touch ${prefix}.candidate_small_indels.vcf.gz.tbi
+ touch ${prefix}.candidate_sv.vcf.gz
+ touch ${prefix}.candidate_sv.vcf.gz.tbi
+ touch ${prefix}.tumor_sv.vcf.gz
+ touch ${prefix}.tumor_sv.vcf.gz.tbi
+
+ cat <<-END_VERSIONS > versions.yml
+ "${task.process}":
+ manta: \$( configManta.py --version )
+ END_VERSIONS
+ """
}
diff --git a/modules/nf-core/manta/tumoronly/meta.yml b/modules/nf-core/manta/tumoronly/meta.yml
index 398d684365..63556c59b4 100644
--- a/modules/nf-core/manta/tumoronly/meta.yml
+++ b/modules/nf-core/manta/tumoronly/meta.yml
@@ -16,7 +16,6 @@ tools:
tool_dev_url: https://github.com/Illumina/manta
doi: "10.1093/bioinformatics/btv710"
licence: ["GPL v3"]
-
input:
- meta:
type: map
@@ -39,15 +38,28 @@ input:
type: file
description: Index for BED file containing target regions for variant calling
pattern: "*.{bed.tbi}"
+ - meta2:
+ type: map
+ description: |
+ Groovy Map containing reference information
+ e.g. [ id:'genome' ]
- fasta:
type: file
description: Genome reference FASTA file
pattern: "*.{fa,fasta}"
+ - meta3:
+ type: map
+ description: |
+ Groovy Map containing reference information
+ e.g. [ id:'genome' ]
- fai:
type: file
description: Genome reference FASTA index file
pattern: "*.{fa.fai,fasta.fai}"
-
+ - config:
+ type: file
+ description: Manta configuration file
+ pattern: "*.{ini,conf,config}"
output:
- meta:
type: map
@@ -82,6 +94,9 @@ output:
type: file
description: File containing software versions
pattern: "versions.yml"
-
authors:
- "@maxulysse"
+ - "@nvnieuwk"
+maintainers:
+ - "@maxulysse"
+ - "@nvnieuwk"
diff --git a/modules/nf-core/mosdepth/environment.yml b/modules/nf-core/mosdepth/environment.yml
new file mode 100644
index 0000000000..b12e3cb127
--- /dev/null
+++ b/modules/nf-core/mosdepth/environment.yml
@@ -0,0 +1,8 @@
+name: mosdepth
+channels:
+ - conda-forge
+ - bioconda
+ - defaults
+dependencies:
+ # renovate: datasource=conda depName=bioconda/mosdepth
+ - mosdepth=0.3.3
diff --git a/modules/nf-core/mosdepth/main.nf b/modules/nf-core/mosdepth/main.nf
index 74db3a274b..7dd13ffb51 100644
--- a/modules/nf-core/mosdepth/main.nf
+++ b/modules/nf-core/mosdepth/main.nf
@@ -2,7 +2,7 @@ process MOSDEPTH {
tag "$meta.id"
label 'process_medium'
- conda "bioconda::mosdepth=0.3.3"
+ conda "${moduleDir}/environment.yml"
container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
'https://depot.galaxyproject.org/singularity/mosdepth:0.3.3--hdfd78af_1' :
'biocontainers/mosdepth:0.3.3--hdfd78af_1'}"
diff --git a/modules/nf-core/mosdepth/meta.yml b/modules/nf-core/mosdepth/meta.yml
index adf3893f3e..76263b5af9 100644
--- a/modules/nf-core/mosdepth/meta.yml
+++ b/modules/nf-core/mosdepth/meta.yml
@@ -107,3 +107,8 @@ authors:
- "@drpatelh"
- "@ramprasadn"
- "@matthdsm"
+maintainers:
+ - "@joseespinosa"
+ - "@drpatelh"
+ - "@ramprasadn"
+ - "@matthdsm"
diff --git a/modules/nf-core/msisensorpro/msisomatic/environment.yml b/modules/nf-core/msisensorpro/msisomatic/environment.yml
new file mode 100644
index 0000000000..147a9d6b85
--- /dev/null
+++ b/modules/nf-core/msisensorpro/msisomatic/environment.yml
@@ -0,0 +1,7 @@
+name: msisensorpro_msisomatic
+channels:
+ - conda-forge
+ - bioconda
+ - defaults
+dependencies:
+ - bioconda::msisensor-pro=1.2.0
diff --git a/modules/nf-core/msisensorpro/msisomatic/main.nf b/modules/nf-core/msisensorpro/msisomatic/main.nf
index 50287cb3cd..9b0084d949 100644
--- a/modules/nf-core/msisensorpro/msisomatic/main.nf
+++ b/modules/nf-core/msisensorpro/msisomatic/main.nf
@@ -2,7 +2,7 @@ process MSISENSORPRO_MSISOMATIC {
tag "$meta.id"
label 'process_low'
- conda "bioconda::msisensor-pro=1.2.0"
+ conda "${moduleDir}/environment.yml"
container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
'https://depot.galaxyproject.org/singularity/msisensor-pro:1.2.0--hfc31af2_0' :
'biocontainers/msisensor-pro:1.2.0--hfc31af2_0' }"
diff --git a/modules/nf-core/msisensorpro/msisomatic/meta.yml b/modules/nf-core/msisensorpro/msisomatic/meta.yml
index bcd95dc72b..a6dda66ff2 100644
--- a/modules/nf-core/msisensorpro/msisomatic/meta.yml
+++ b/modules/nf-core/msisensorpro/msisomatic/meta.yml
@@ -13,7 +13,6 @@ tools:
tool_dev_url: https://github.com/xjtu-omics/msisensor-pro
doi: "10.1016/j.gpb.2020.02.001"
licence: ["Custom Licence"]
-
input:
- meta:
type: map
@@ -48,7 +47,6 @@ input:
type: file
description: Output from msisensor-pro/scan, containing a list of msi regions
pattern: "*.list"
-
output:
- meta:
type: map
@@ -75,6 +73,7 @@ output:
type: file
description: File containing microsatellite list
pattern: "*.{list}"
-
authors:
- "@FriederikeHanssen"
+maintainers:
+ - "@FriederikeHanssen"
diff --git a/modules/nf-core/msisensorpro/scan/environment.yml b/modules/nf-core/msisensorpro/scan/environment.yml
new file mode 100644
index 0000000000..377c28a61b
--- /dev/null
+++ b/modules/nf-core/msisensorpro/scan/environment.yml
@@ -0,0 +1,7 @@
+name: msisensorpro_scan
+channels:
+ - conda-forge
+ - bioconda
+ - defaults
+dependencies:
+ - bioconda::msisensor-pro=1.2.0
diff --git a/modules/nf-core/msisensorpro/scan/main.nf b/modules/nf-core/msisensorpro/scan/main.nf
index 760d51aa22..9c7dce2596 100644
--- a/modules/nf-core/msisensorpro/scan/main.nf
+++ b/modules/nf-core/msisensorpro/scan/main.nf
@@ -2,7 +2,7 @@ process MSISENSORPRO_SCAN {
tag "$meta.id"
label 'process_low'
- conda "bioconda::msisensor-pro=1.2.0"
+ conda "${moduleDir}/environment.yml"
container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
'https://depot.galaxyproject.org/singularity/msisensor-pro:1.2.0--hfc31af2_0' :
'biocontainers/msisensor-pro:1.2.0--hfc31af2_0' }"
diff --git a/modules/nf-core/msisensorpro/scan/meta.yml b/modules/nf-core/msisensorpro/scan/meta.yml
index 47f4d3529f..aec743ede5 100644
--- a/modules/nf-core/msisensorpro/scan/meta.yml
+++ b/modules/nf-core/msisensorpro/scan/meta.yml
@@ -12,7 +12,6 @@ tools:
tool_dev_url: https://github.com/xjtu-omics/msisensor-pro
doi: "10.1016/j.gpb.2020.02.001"
licence: ["Custom Licence"]
-
input:
- meta:
type: map
@@ -23,7 +22,6 @@ input:
type: file
description: Reference genome
pattern: "*.{fasta}"
-
output:
- meta:
type: map
@@ -38,6 +36,7 @@ output:
type: file
description: File containing microsatellite list
pattern: "*.{list}"
-
authors:
- "@FriederikeHanssen"
+maintainers:
+ - "@FriederikeHanssen"
diff --git a/modules/nf-core/multiqc/environment.yml b/modules/nf-core/multiqc/environment.yml
new file mode 100644
index 0000000000..d2a9f21a74
--- /dev/null
+++ b/modules/nf-core/multiqc/environment.yml
@@ -0,0 +1,7 @@
+name: multiqc
+channels:
+ - conda-forge
+ - bioconda
+ - defaults
+dependencies:
+ - bioconda::multiqc=1.17
diff --git a/modules/nf-core/multiqc/main.nf b/modules/nf-core/multiqc/main.nf
index 65d7dd0de1..2bbc3983fa 100644
--- a/modules/nf-core/multiqc/main.nf
+++ b/modules/nf-core/multiqc/main.nf
@@ -1,10 +1,10 @@
process MULTIQC {
label 'process_single'
- conda "bioconda::multiqc=1.15"
+ conda "${moduleDir}/environment.yml"
container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
- 'https://depot.galaxyproject.org/singularity/multiqc:1.15--pyhdfd78af_0' :
- 'biocontainers/multiqc:1.15--pyhdfd78af_0' }"
+ 'https://depot.galaxyproject.org/singularity/multiqc:1.17--pyhdfd78af_0' :
+ 'biocontainers/multiqc:1.17--pyhdfd78af_0' }"
input:
path multiqc_files, stageAs: "?/*"
diff --git a/modules/nf-core/multiqc/meta.yml b/modules/nf-core/multiqc/meta.yml
index f93b5ee519..f1aa660eb7 100644
--- a/modules/nf-core/multiqc/meta.yml
+++ b/modules/nf-core/multiqc/meta.yml
@@ -1,5 +1,5 @@
-# yaml-language-server: $schema=https://raw.githubusercontent.com/nf-core/modules/master/modules/yaml-schema.json
-name: MultiQC
+# yaml-language-server: $schema=https://raw.githubusercontent.com/nf-core/modules/master/modules/meta-schema.json
+name: multiqc
description: Aggregate results from bioinformatics analyses across many samples into a single report
keywords:
- QC
@@ -13,7 +13,6 @@ tools:
homepage: https://multiqc.info/
documentation: https://multiqc.info/docs/
licence: ["GPL-3.0-or-later"]
-
input:
- multiqc_files:
type: file
@@ -31,7 +30,6 @@ input:
type: file
description: Optional logo file for MultiQC
pattern: "*.{png}"
-
output:
- report:
type: file
@@ -54,3 +52,8 @@ authors:
- "@bunop"
- "@drpatelh"
- "@jfy133"
+maintainers:
+ - "@abhi18av"
+ - "@bunop"
+ - "@drpatelh"
+ - "@jfy133"
diff --git a/modules/nf-core/ngscheckmate/ncm/environment.yml b/modules/nf-core/ngscheckmate/ncm/environment.yml
new file mode 100644
index 0000000000..bf185fc23e
--- /dev/null
+++ b/modules/nf-core/ngscheckmate/ncm/environment.yml
@@ -0,0 +1,7 @@
+name: ngscheckmate_ncm
+channels:
+ - conda-forge
+ - bioconda
+ - defaults
+dependencies:
+ - bioconda::ngscheckmate=1.0.1
diff --git a/modules/nf-core/ngscheckmate/ncm/main.nf b/modules/nf-core/ngscheckmate/ncm/main.nf
new file mode 100644
index 0000000000..99921ddcc5
--- /dev/null
+++ b/modules/nf-core/ngscheckmate/ncm/main.nf
@@ -0,0 +1,64 @@
+process NGSCHECKMATE_NCM {
+ label 'process_low'
+
+ conda "${moduleDir}/environment.yml"
+ container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
+ 'https://depot.galaxyproject.org/singularity/ngscheckmate:1.0.1--py27pl5321r40hdfd78af_1':
+ 'biocontainers/ngscheckmate:1.0.1--py27pl5321r40hdfd78af_1' }"
+
+ input:
+ tuple val(meta) , path(files)
+ tuple val(meta2), path(snp_bed)
+ tuple val(meta3), path(fasta)
+
+ output:
+ tuple val(meta), path("*_corr_matrix.txt"), emit: corr_matrix
+ tuple val(meta), path("*_matched.txt") , emit: matched
+ tuple val(meta), path("*_all.txt") , emit: all
+ tuple val(meta), path("*.pdf") , emit: pdf, optional: true
+ tuple val(meta), path("*.vcf") , emit: vcf, optional: true
+ path "versions.yml" , emit: versions
+
+ when:
+ task.ext.when == null || task.ext.when
+
+ script:
+ def args = task.ext.args ?: ''
+ def prefix = task.ext.prefix ?: "$meta.id"
+ def unzip = files.any { it.toString().endsWith(".vcf.gz") }
+ """
+ if $unzip
+ then
+ for VCFGZ in *.vcf.gz; do
+ gunzip -cdf \$VCFGZ > \$( basename \$VCFGZ .gz );
+ done
+ fi
+
+ NCM_REF="./"${fasta} ncm.py -d . -bed ${snp_bed} -O . -N ${prefix} $args
+
+ if $unzip
+ then
+ rm -f *.vcf # clean up decompressed vcfs
+ fi
+
+ cat <<-END_VERSIONS > versions.yml
+ "${task.process}":
+ ngscheckmate: \$(ncm.py --help | sed "7!d;s/ *Ensuring Sample Identity v//g")
+ END_VERSIONS
+ """
+
+ stub:
+ def prefix = task.ext.prefix ?: "$meta.id"
+ """
+ touch ${prefix}_output_corr_matrix.txt
+ touch ${prefix}_matched.txt
+ touch ${prefix}_all.txt
+ touch ${prefix}.pdf
+
+ cat <<-END_VERSIONS > versions.yml
+ "${task.process}":
+ ngscheckmate: \$(ncm.py --help | sed "7!d;s/ *Ensuring Sample Identity v//g")
+ END_VERSIONS
+ """
+
+}
diff --git a/modules/nf-core/ngscheckmate/ncm/meta.yml b/modules/nf-core/ngscheckmate/ncm/meta.yml
new file mode 100644
index 0000000000..0defad0064
--- /dev/null
+++ b/modules/nf-core/ngscheckmate/ncm/meta.yml
@@ -0,0 +1,71 @@
+name: ngscheckmate_ncm
+description: Determining whether sequencing data comes from the same individual by using SNP matching. Designed for humans, operating on VCF or BAM files.
+keywords:
+ - ngscheckmate
+ - matching
+ - snp
+tools:
+ - ngscheckmate:
+ description: NGSCheckMate is a software package for identifying next generation sequencing (NGS) data files from the same individual, including matching between DNA and RNA.
+ homepage: https://github.com/parklab/NGSCheckMate
+ documentation: https://github.com/parklab/NGSCheckMate
+ tool_dev_url: https://github.com/parklab/NGSCheckMate
+ doi: "10.1093/nar/gkx193"
+ licence: ["MIT"]
+input:
+ - meta:
+ type: map
+ description: |
+ Groovy Map containing sample information
+ e.g. [ id:'test']
+ - files:
+ type: file
+ description: VCF or BAM files for each sample, in a merged channel (possibly gzipped). BAM files require an index too.
+ pattern: "*.{vcf,vcf.gz,bam,bai}"
+ - meta2:
+ type: map
+ description: |
+ Groovy Map containing SNP information
+ e.g. [ id:'test' ]
+ - snp_bed:
+ type: file
+ description: BED file containing the SNPs to analyse
+ pattern: "*.{bed}"
+ - meta3:
+ type: map
+ description: |
+ Groovy Map containing reference fasta index information
+ e.g. [ id:'test' ]
+ - fasta:
+ type: file
+ description: fasta file for the genome, only used in bam mode
+ pattern: "*.{fasta}"
+output:
+ - versions:
+ type: file
+ description: File containing software versions
+ pattern: "versions.yml"
+ - pdf:
+ type: file
+ description: A pdf containing a dendrogram showing how the samples match up
+ pattern: "*.{pdf}"
+ - corr_matrix:
+ type: file
+ description: A text file containing the correlation matrix between each sample
+ pattern: "*corr_matrix.txt"
+ - matched:
+ type: file
+ description: A txt file containing only the samples that match each other
+ pattern: "*matched.txt"
+ - all:
+ type: file
+ description: A txt file containing all the sample comparisons, whether they match or not
+ pattern: "*all.txt"
+ - vcf:
+ type: file
+ description: If run in bam mode, VCF files for each sample giving the SNP calls used
+ pattern: "*.vcf"
+authors:
+ - "@sppearce"
+maintainers:
+ - "@sppearce"
diff --git a/modules/nf-core/samblaster/environment.yml b/modules/nf-core/samblaster/environment.yml
new file mode 100644
index 0000000000..f956283ec0
--- /dev/null
+++ b/modules/nf-core/samblaster/environment.yml
@@ -0,0 +1,8 @@
+name: samblaster
+channels:
+ - conda-forge
+ - bioconda
+ - defaults
+dependencies:
+ - bioconda::samblaster=0.1.26
+ - bioconda::samtools=1.16.1
diff --git a/modules/nf-core/samblaster/main.nf b/modules/nf-core/samblaster/main.nf
index af1f71d6e5..4622d3691d 100644
--- a/modules/nf-core/samblaster/main.nf
+++ b/modules/nf-core/samblaster/main.nf
@@ -2,7 +2,7 @@ process SAMBLASTER {
tag "$meta.id"
label 'process_low'
- conda "bioconda::samblaster=0.1.26 bioconda::samtools=1.16.1"
+ conda "${moduleDir}/environment.yml"
container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
'https://depot.galaxyproject.org/singularity/mulled-v2-19fa9f1a5c3966b63a24166365e81da35738c5ab:cee56b506ceb753d4bbef7e05b81e1bfc25d937f-0' :
'biocontainers/mulled-v2-19fa9f1a5c3966b63a24166365e81da35738c5ab:cee56b506ceb753d4bbef7e05b81e1bfc25d937f-0' }"
diff --git a/modules/nf-core/samblaster/meta.yml b/modules/nf-core/samblaster/meta.yml
index f090c89816..ccb48320c1 100644
--- a/modules/nf-core/samblaster/meta.yml
+++ b/modules/nf-core/samblaster/meta.yml
@@ -19,12 +19,10 @@ tools:
It can also optionally output discordant read pairs and/or split read mappings to separate SAM files,
and/or unmapped/clipped reads to a separate FASTQ file.
By default, samblaster reads SAM input from stdin and writes SAM to stdout.
-
documentation: https://github.com/GregoryFaust/samblaster
tool_dev_url: https://github.com/GregoryFaust/samblaster
doi: "10.1093/bioinformatics/btu314"
licence: ["MIT"]
-
input:
- meta:
type: map
@@ -35,7 +33,6 @@ input:
type: file
description: BAM file
pattern: "*.bam"
-
output:
- meta:
type: map
@@ -50,6 +47,7 @@ output:
type: file
description: Tagged or filtered BAM file
pattern: "*.bam"
-
authors:
- "@lescai"
+maintainers:
+ - "@lescai"
diff --git a/modules/nf-core/samtools/bam2fq/environment.yml b/modules/nf-core/samtools/bam2fq/environment.yml
new file mode 100644
index 0000000000..b59ea8e050
--- /dev/null
+++ b/modules/nf-core/samtools/bam2fq/environment.yml
@@ -0,0 +1,7 @@
+name: samtools_bam2fq
+channels:
+ - conda-forge
+ - bioconda
+ - defaults
+dependencies:
+ - bioconda::samtools=1.17
diff --git a/modules/nf-core/samtools/bam2fq/main.nf b/modules/nf-core/samtools/bam2fq/main.nf
index 858f2ae3d7..016d91d992 100644
--- a/modules/nf-core/samtools/bam2fq/main.nf
+++ b/modules/nf-core/samtools/bam2fq/main.nf
@@ -2,7 +2,7 @@ process SAMTOOLS_BAM2FQ {
tag "$meta.id"
label 'process_low'
- conda "bioconda::samtools=1.17"
+ conda "${moduleDir}/environment.yml"
container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
'https://depot.galaxyproject.org/singularity/samtools:1.17--h00cdaf9_0' :
'biocontainers/samtools:1.17--h00cdaf9_0' }"
diff --git a/modules/nf-core/samtools/bam2fq/meta.yml b/modules/nf-core/samtools/bam2fq/meta.yml
index c7ad3ba834..7769046b54 100644
--- a/modules/nf-core/samtools/bam2fq/meta.yml
+++ b/modules/nf-core/samtools/bam2fq/meta.yml
@@ -9,11 +9,8 @@ keywords:
tools:
- samtools:
description: Tools for dealing with SAM, BAM and CRAM files
-
documentation: http://www.htslib.org/doc/1.1/samtools.html
-
licence: ["MIT"]
-
input:
- meta:
type: map
@@ -32,7 +29,6 @@ input:
Note: choosing TRUE will generate 4 different files.
Choosing FALSE will produce a single file, which will be interleaved in case
the input contains paired reads.
-
output:
- meta:
type: map
@@ -49,6 +45,7 @@ output:
FASTQ files, which will be either a group of 4 files (read_1, read_2, other and singleton)
or a single interleaved .fq.gz file if the user chooses not to split the reads.
pattern: "*.fq.gz"
-
authors:
- "@lescai"
+maintainers:
+ - "@lescai"
diff --git a/modules/nf-core/samtools/collatefastq/environment.yml b/modules/nf-core/samtools/collatefastq/environment.yml
new file mode 100644
index 0000000000..ec3faa9cdc
--- /dev/null
+++ b/modules/nf-core/samtools/collatefastq/environment.yml
@@ -0,0 +1,7 @@
+name: samtools_collatefastq
+channels:
+ - conda-forge
+ - bioconda
+ - defaults
+dependencies:
+ - bioconda::samtools=1.17
diff --git a/modules/nf-core/samtools/collatefastq/main.nf b/modules/nf-core/samtools/collatefastq/main.nf
index 4469fafcca..537b88cca4 100644
--- a/modules/nf-core/samtools/collatefastq/main.nf
+++ b/modules/nf-core/samtools/collatefastq/main.nf
@@ -2,7 +2,7 @@ process SAMTOOLS_COLLATEFASTQ {
tag "$meta.id"
label 'process_low'
- conda "bioconda::samtools=1.17"
+ conda "${moduleDir}/environment.yml"
container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
'https://depot.galaxyproject.org/singularity/samtools:1.17--h00cdaf9_0' :
'biocontainers/samtools:1.17--h00cdaf9_0' }"
diff --git a/modules/nf-core/samtools/collatefastq/meta.yml b/modules/nf-core/samtools/collatefastq/meta.yml
index b647cba454..898cdbdad7 100644
--- a/modules/nf-core/samtools/collatefastq/meta.yml
+++ b/modules/nf-core/samtools/collatefastq/meta.yml
@@ -9,11 +9,8 @@ keywords:
tools:
- samtools:
description: Tools for dealing with SAM, BAM and CRAM files
-
documentation: http://www.htslib.org/doc/1.1/samtools.html
-
licence: ["MIT"]
-
input:
- meta:
type: map
@@ -69,8 +66,11 @@ output:
type: file
description: File containing software versions
pattern: "versions.yml"
-
authors:
- "@lescai"
- "@maxulysse"
- "@matthdsm"
+maintainers:
+ - "@lescai"
+ - "@maxulysse"
+ - "@matthdsm"
diff --git a/modules/nf-core/samtools/convert/environment.yml b/modules/nf-core/samtools/convert/environment.yml
new file mode 100644
index 0000000000..b2150ad507
--- /dev/null
+++ b/modules/nf-core/samtools/convert/environment.yml
@@ -0,0 +1,7 @@
+name: samtools_convert
+channels:
+ - conda-forge
+ - bioconda
+ - defaults
+dependencies:
+ - bioconda::samtools=1.17
diff --git a/modules/nf-core/samtools/convert/main.nf b/modules/nf-core/samtools/convert/main.nf
index 29722ba787..ddf17d2dee 100644
--- a/modules/nf-core/samtools/convert/main.nf
+++ b/modules/nf-core/samtools/convert/main.nf
@@ -2,7 +2,7 @@ process SAMTOOLS_CONVERT {
tag "$meta.id"
label 'process_low'
- conda "bioconda::samtools=1.17"
+ conda "${moduleDir}/environment.yml"
container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
'https://depot.galaxyproject.org/singularity/samtools:1.17--h00cdaf9_0' :
'biocontainers/samtools:1.17--h00cdaf9_0' }"
diff --git a/modules/nf-core/samtools/convert/meta.yml b/modules/nf-core/samtools/convert/meta.yml
index 866c228fef..f8a1612fc2 100644
--- a/modules/nf-core/samtools/convert/meta.yml
+++ b/modules/nf-core/samtools/convert/meta.yml
@@ -50,3 +50,6 @@ output:
authors:
- "@FriederikeHanssen"
- "@maxulysse"
+maintainers:
+ - "@FriederikeHanssen"
+ - "@maxulysse"
diff --git a/modules/nf-core/samtools/faidx/environment.yml b/modules/nf-core/samtools/faidx/environment.yml
new file mode 100644
index 0000000000..73badedb18
--- /dev/null
+++ b/modules/nf-core/samtools/faidx/environment.yml
@@ -0,0 +1,7 @@
+name: samtools_faidx
+channels:
+ - conda-forge
+ - bioconda
+ - defaults
+dependencies:
+ - bioconda::samtools=1.17
diff --git a/modules/nf-core/samtools/faidx/main.nf b/modules/nf-core/samtools/faidx/main.nf
index 59ed308876..3aa988224e 100644
--- a/modules/nf-core/samtools/faidx/main.nf
+++ b/modules/nf-core/samtools/faidx/main.nf
@@ -2,7 +2,7 @@ process SAMTOOLS_FAIDX {
tag "$fasta"
label 'process_single'
- conda "bioconda::samtools=1.17"
+ conda "${moduleDir}/environment.yml"
container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
'https://depot.galaxyproject.org/singularity/samtools:1.17--h00cdaf9_0' :
'biocontainers/samtools:1.17--h00cdaf9_0' }"
diff --git a/modules/nf-core/samtools/faidx/meta.yml b/modules/nf-core/samtools/faidx/meta.yml
index 957b25e52b..e189af28fd 100644
--- a/modules/nf-core/samtools/faidx/meta.yml
+++ b/modules/nf-core/samtools/faidx/meta.yml
@@ -55,3 +55,7 @@ authors:
- "@drpatelh"
- "@ewels"
- "@phue"
+maintainers:
+ - "@drpatelh"
+ - "@ewels"
+ - "@phue"
diff --git a/modules/nf-core/samtools/index/environment.yml b/modules/nf-core/samtools/index/environment.yml
new file mode 100644
index 0000000000..3c6f95b25a
--- /dev/null
+++ b/modules/nf-core/samtools/index/environment.yml
@@ -0,0 +1,7 @@
+name: samtools_index
+channels:
+ - conda-forge
+ - bioconda
+ - defaults
+dependencies:
+ - bioconda::samtools=1.17
diff --git a/modules/nf-core/samtools/index/main.nf b/modules/nf-core/samtools/index/main.nf
index 0b20aa4bb8..256bd7c469 100644
--- a/modules/nf-core/samtools/index/main.nf
+++ b/modules/nf-core/samtools/index/main.nf
@@ -2,7 +2,7 @@ process SAMTOOLS_INDEX {
tag "$meta.id"
label 'process_low'
- conda "bioconda::samtools=1.17"
+ conda "${moduleDir}/environment.yml"
container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
'https://depot.galaxyproject.org/singularity/samtools:1.17--h00cdaf9_0' :
'biocontainers/samtools:1.17--h00cdaf9_0' }"
diff --git a/modules/nf-core/samtools/index/meta.yml b/modules/nf-core/samtools/index/meta.yml
index 8bd2fa6fb4..01a4ee03eb 100644
--- a/modules/nf-core/samtools/index/meta.yml
+++ b/modules/nf-core/samtools/index/meta.yml
@@ -51,3 +51,7 @@ authors:
- "@drpatelh"
- "@ewels"
- "@maxulysse"
+maintainers:
+ - "@drpatelh"
+ - "@ewels"
+ - "@maxulysse"
diff --git a/modules/nf-core/samtools/merge/environment.yml b/modules/nf-core/samtools/merge/environment.yml
new file mode 100644
index 0000000000..0d437d8a88
--- /dev/null
+++ b/modules/nf-core/samtools/merge/environment.yml
@@ -0,0 +1,7 @@
+name: samtools_merge
+channels:
+ - conda-forge
+ - bioconda
+ - defaults
+dependencies:
+ - bioconda::samtools=1.17
diff --git a/modules/nf-core/samtools/merge/main.nf b/modules/nf-core/samtools/merge/main.nf
index b73b7cb2a9..21f785cfdc 100644
--- a/modules/nf-core/samtools/merge/main.nf
+++ b/modules/nf-core/samtools/merge/main.nf
@@ -2,7 +2,7 @@ process SAMTOOLS_MERGE {
tag "$meta.id"
label 'process_low'
- conda "bioconda::samtools=1.17"
+ conda "${moduleDir}/environment.yml"
container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
'https://depot.galaxyproject.org/singularity/samtools:1.17--h00cdaf9_0' :
'biocontainers/samtools:1.17--h00cdaf9_0' }"
@@ -16,6 +16,7 @@ process SAMTOOLS_MERGE {
tuple val(meta), path("${prefix}.bam") , optional:true, emit: bam
tuple val(meta), path("${prefix}.cram"), optional:true, emit: cram
tuple val(meta), path("*.csi") , optional:true, emit: csi
+ tuple val(meta), path("*.crai") , optional:true, emit: crai
path "versions.yml" , emit: versions
diff --git a/modules/nf-core/samtools/merge/meta.yml b/modules/nf-core/samtools/merge/meta.yml
index 3a815f74b9..2e8f3dbbb5 100644
--- a/modules/nf-core/samtools/merge/meta.yml
+++ b/modules/nf-core/samtools/merge/meta.yml
@@ -65,9 +65,19 @@ output:
type: file
description: BAM index file (optional)
pattern: "*.csi"
+ - crai:
+ type: file
+ description: CRAM index file (optional)
+ pattern: "*.crai"
authors:
- "@drpatelh"
- "@yuukiiwa "
- "@maxulysse"
- "@FriederikeHanssen"
- "@ramprasadn"
+maintainers:
+ - "@drpatelh"
+ - "@yuukiiwa "
+ - "@maxulysse"
+ - "@FriederikeHanssen"
+ - "@ramprasadn"
diff --git a/modules/nf-core/samtools/mpileup/environment.yml b/modules/nf-core/samtools/mpileup/environment.yml
new file mode 100644
index 0000000000..5f06050d19
--- /dev/null
+++ b/modules/nf-core/samtools/mpileup/environment.yml
@@ -0,0 +1,7 @@
+name: samtools_mpileup
+channels:
+ - conda-forge
+ - bioconda
+ - defaults
+dependencies:
+ - bioconda::samtools=1.17
diff --git a/modules/nf-core/samtools/mpileup/main.nf b/modules/nf-core/samtools/mpileup/main.nf
index d772498415..ed102582c4 100644
--- a/modules/nf-core/samtools/mpileup/main.nf
+++ b/modules/nf-core/samtools/mpileup/main.nf
@@ -2,7 +2,7 @@ process SAMTOOLS_MPILEUP {
tag "$meta.id"
label 'process_single'
- conda "bioconda::samtools=1.17"
+ conda "${moduleDir}/environment.yml"
container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
'https://depot.galaxyproject.org/singularity/samtools:1.17--h00cdaf9_0' :
'biocontainers/samtools:1.17--h00cdaf9_0' }"
diff --git a/modules/nf-core/samtools/mpileup/meta.yml b/modules/nf-core/samtools/mpileup/meta.yml
index 7597ef41ab..13038fbc9b 100644
--- a/modules/nf-core/samtools/mpileup/meta.yml
+++ b/modules/nf-core/samtools/mpileup/meta.yml
@@ -50,3 +50,6 @@ output:
authors:
- "@drpatelh"
- "@joseespinosa"
+maintainers:
+ - "@drpatelh"
+ - "@joseespinosa"
diff --git a/modules/nf-core/samtools/stats/environment.yml b/modules/nf-core/samtools/stats/environment.yml
new file mode 100644
index 0000000000..ed4e8961af
--- /dev/null
+++ b/modules/nf-core/samtools/stats/environment.yml
@@ -0,0 +1,7 @@
+name: samtools_stats
+channels:
+ - conda-forge
+ - bioconda
+ - defaults
+dependencies:
+ - bioconda::samtools=1.17
diff --git a/modules/nf-core/samtools/stats/main.nf b/modules/nf-core/samtools/stats/main.nf
index 4a2607ded0..07286ef410 100644
--- a/modules/nf-core/samtools/stats/main.nf
+++ b/modules/nf-core/samtools/stats/main.nf
@@ -2,7 +2,7 @@ process SAMTOOLS_STATS {
tag "$meta.id"
label 'process_single'
- conda "bioconda::samtools=1.17"
+ conda "${moduleDir}/environment.yml"
container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
'https://depot.galaxyproject.org/singularity/samtools:1.17--h00cdaf9_0' :
'biocontainers/samtools:1.17--h00cdaf9_0' }"
diff --git a/modules/nf-core/samtools/stats/meta.yml b/modules/nf-core/samtools/stats/meta.yml
index 90e6345f53..735ff8122a 100644
--- a/modules/nf-core/samtools/stats/meta.yml
+++ b/modules/nf-core/samtools/stats/meta.yml
@@ -57,3 +57,7 @@ authors:
- "@drpatelh"
- "@FriederikeHanssen"
- "@ramprasadn"
+maintainers:
+ - "@drpatelh"
+ - "@FriederikeHanssen"
+ - "@ramprasadn"
diff --git a/modules/nf-core/samtools/stats/tests/main.nf.test b/modules/nf-core/samtools/stats/tests/main.nf.test
new file mode 100644
index 0000000000..e037132ca2
--- /dev/null
+++ b/modules/nf-core/samtools/stats/tests/main.nf.test
@@ -0,0 +1,78 @@
+nextflow_process {
+
+ name "Test Process SAMTOOLS_STATS"
+ script "../main.nf"
+ process "SAMTOOLS_STATS"
+ tag "modules"
+ tag "modules/nf-core"
+ tag "samtools"
+ tag "samtools/stats"
+
+ test("SAMTOOLS STATS Should run without failures") {
+
+ when {
+ params {
+
+ outdir = "$outputDir"
+ }
+ process {
+ """
+ // define inputs of the process here.
+ input[0] = [
+ [ id:'test', single_end:false ], // meta map
+ file(params.test_data['sarscov2']['illumina']['test_paired_end_sorted_bam'], checkIfExists: true),
+ file(params.test_data['sarscov2']['illumina']['test_paired_end_sorted_bam_bai'], checkIfExists: true)
+
+ ]
+ input[1] = [[],[]]
+ """
+
+ }
+ }
+
+ then {
+ assertAll(
+ {assert process.success},
+ {assert snapshot(process.out).match()}
+ )
+ }
+
+ }
+
+ test("SAMTOOLS CRAM Should run without failures") {
+
+ when {
+ params {
+
+ outdir = "$outputDir"
+ }
+ process {
+ """
+ // define inputs of the process here
+ input[0] = [
+ [ id:'test', single_end:false ], // meta map
+ file(params.test_data['homo_sapiens']['illumina']['test_paired_end_recalibrated_sorted_cram'], checkIfExists: true),
+ file(params.test_data['homo_sapiens']['illumina']['test_paired_end_recalibrated_sorted_cram_crai'], checkIfExists: true)
+
+ ]
+ input[1] = [
+ [ id:'genome' ],
+ file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true)
+ ]
+ """
+ }
+
+
+ }
+
+ then {
+ assertAll(
+ {assert process.success},
+ {assert snapshot(process.out).match()}
+ )
+ }
+
+ }
+
+
+}
diff --git a/modules/nf-core/samtools/stats/tests/main.nf.test.snap b/modules/nf-core/samtools/stats/tests/main.nf.test.snap
new file mode 100644
index 0000000000..516b2b0192
--- /dev/null
+++ b/modules/nf-core/samtools/stats/tests/main.nf.test.snap
@@ -0,0 +1,64 @@
+{
+ "SAMTOOLS STATS Should run without failures": {
+ "content": [
+ {
+ "0": [
+ [
+ {
+ "id": "test",
+ "single_end": false
+ },
+ "test.stats:md5,6e768486d5df0257351c5419a79f9c9b"
+ ]
+ ],
+ "1": [
+ "versions.yml:md5,08035f3409d934d47a416150884bb0df"
+ ],
+ "stats": [
+ [
+ {
+ "id": "test",
+ "single_end": false
+ },
+ "test.stats:md5,6e768486d5df0257351c5419a79f9c9b"
+ ]
+ ],
+ "versions": [
+ "versions.yml:md5,08035f3409d934d47a416150884bb0df"
+ ]
+ }
+ ],
+ "timestamp": "2023-10-18T12:12:42.998746"
+ },
+ "SAMTOOLS CRAM Should run without failures": {
+ "content": [
+ {
+ "0": [
+ [
+ {
+ "id": "test",
+ "single_end": false
+ },
+ "test.stats:md5,7c9ee5747793cceb9d6f4d733345641a"
+ ]
+ ],
+ "1": [
+ "versions.yml:md5,08035f3409d934d47a416150884bb0df"
+ ],
+ "stats": [
+ [
+ {
+ "id": "test",
+ "single_end": false
+ },
+ "test.stats:md5,7c9ee5747793cceb9d6f4d733345641a"
+ ]
+ ],
+ "versions": [
+ "versions.yml:md5,08035f3409d934d47a416150884bb0df"
+ ]
+ }
+ ],
+ "timestamp": "2023-10-18T12:13:30.747222"
+ }
+}
\ No newline at end of file
diff --git a/modules/nf-core/samtools/stats/tests/tags.yml b/modules/nf-core/samtools/stats/tests/tags.yml
new file mode 100644
index 0000000000..7c28e30f3f
--- /dev/null
+++ b/modules/nf-core/samtools/stats/tests/tags.yml
@@ -0,0 +1,2 @@
+samtools/stats:
+ - modules/nf-core/samtools/stats/**
diff --git a/modules/nf-core/samtools/view/environment.yml b/modules/nf-core/samtools/view/environment.yml
new file mode 100644
index 0000000000..141e7bd829
--- /dev/null
+++ b/modules/nf-core/samtools/view/environment.yml
@@ -0,0 +1,7 @@
+name: samtools_view
+channels:
+ - conda-forge
+ - bioconda
+ - defaults
+dependencies:
+ - bioconda::samtools=1.17
diff --git a/modules/nf-core/samtools/view/main.nf b/modules/nf-core/samtools/view/main.nf
index cb91facf8c..ddf3f88ae5 100644
--- a/modules/nf-core/samtools/view/main.nf
+++ b/modules/nf-core/samtools/view/main.nf
@@ -2,7 +2,7 @@ process SAMTOOLS_VIEW {
tag "$meta.id"
label 'process_low'
- conda "bioconda::samtools=1.17"
+ conda "${moduleDir}/environment.yml"
container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
'https://depot.galaxyproject.org/singularity/samtools:1.17--h00cdaf9_0' :
'biocontainers/samtools:1.17--h00cdaf9_0' }"
diff --git a/modules/nf-core/samtools/view/meta.yml b/modules/nf-core/samtools/view/meta.yml
index 3b05450b2e..3dadafae75 100644
--- a/modules/nf-core/samtools/view/meta.yml
+++ b/modules/nf-core/samtools/view/meta.yml
@@ -82,3 +82,8 @@ authors:
- "@joseespinosa"
- "@FriederikeHanssen"
- "@priyanka-surana"
+maintainers:
+ - "@drpatelh"
+ - "@joseespinosa"
+ - "@FriederikeHanssen"
+ - "@priyanka-surana"
diff --git a/modules/nf-core/sentieon/applyvarcal/environment.yml b/modules/nf-core/sentieon/applyvarcal/environment.yml
new file mode 100644
index 0000000000..b7b5169a89
--- /dev/null
+++ b/modules/nf-core/sentieon/applyvarcal/environment.yml
@@ -0,0 +1,5 @@
+name: sentieon_applyvarcal
+channels:
+ - conda-forge
+ - bioconda
+ - defaults
diff --git a/modules/nf-core/sentieon/applyvarcal/meta.yml b/modules/nf-core/sentieon/applyvarcal/meta.yml
index bbe23ea375..da92ce3436 100644
--- a/modules/nf-core/sentieon/applyvarcal/meta.yml
+++ b/modules/nf-core/sentieon/applyvarcal/meta.yml
@@ -17,7 +17,6 @@ tools:
Our software improves upon BWA, STAR, Minimap2, GATK, HaplotypeCaller, Mutect, and Mutect2 based pipelines and is deployable on any generic-CPU-based computing system.
homepage: https://www.sentieon.com/
documentation: https://www.sentieon.com/
-
input:
- meta:
type: map
@@ -82,3 +81,5 @@ output:
pattern: "versions.yml"
authors:
- "@assp8200"
+maintainers:
+ - "@assp8200"
diff --git a/modules/nf-core/sentieon/bwamem/environment.yml b/modules/nf-core/sentieon/bwamem/environment.yml
new file mode 100644
index 0000000000..efd460cfa4
--- /dev/null
+++ b/modules/nf-core/sentieon/bwamem/environment.yml
@@ -0,0 +1,5 @@
+name: sentieon_bwamem
+channels:
+ - conda-forge
+ - bioconda
+ - defaults
diff --git a/modules/nf-core/sentieon/bwamem/meta.yml b/modules/nf-core/sentieon/bwamem/meta.yml
index 9987e8c900..0859a923ca 100644
--- a/modules/nf-core/sentieon/bwamem/meta.yml
+++ b/modules/nf-core/sentieon/bwamem/meta.yml
@@ -71,3 +71,5 @@ output:
pattern: "versions.yml"
authors:
- "@asp8200"
+maintainers:
+ - "@asp8200"
diff --git a/modules/nf-core/sentieon/dedup/environment.yml b/modules/nf-core/sentieon/dedup/environment.yml
new file mode 100644
index 0000000000..bda2b1385a
--- /dev/null
+++ b/modules/nf-core/sentieon/dedup/environment.yml
@@ -0,0 +1,5 @@
+name: sentieon_dedup
+channels:
+ - conda-forge
+ - bioconda
+ - defaults
diff --git a/modules/nf-core/sentieon/dedup/meta.yml b/modules/nf-core/sentieon/dedup/meta.yml
index ec0565d923..0efbb96c22 100644
--- a/modules/nf-core/sentieon/dedup/meta.yml
+++ b/modules/nf-core/sentieon/dedup/meta.yml
@@ -86,3 +86,5 @@ output:
pattern: "versions.yml"
authors:
- "@asp8200"
+maintainers:
+ - "@asp8200"
diff --git a/modules/nf-core/sentieon/dnamodelapply/environment.yml b/modules/nf-core/sentieon/dnamodelapply/environment.yml
new file mode 100644
index 0000000000..3f92a79ab8
--- /dev/null
+++ b/modules/nf-core/sentieon/dnamodelapply/environment.yml
@@ -0,0 +1,5 @@
+name: sentieon_dnamodelapply
+channels:
+ - conda-forge
+ - bioconda
+ - defaults
diff --git a/modules/nf-core/sentieon/dnamodelapply/meta.yml b/modules/nf-core/sentieon/dnamodelapply/meta.yml
index ec429bea21..2507654577 100644
--- a/modules/nf-core/sentieon/dnamodelapply/meta.yml
+++ b/modules/nf-core/sentieon/dnamodelapply/meta.yml
@@ -1,4 +1,4 @@
-name: "sentieon_dnamodelapply"
+name: sentieon_dnamodelapply
description: modifies the input VCF file by adding the MLrejected FILTER to the variants
keywords:
- dnamodelapply
@@ -12,7 +12,6 @@ tools:
Our software improves upon BWA, STAR, Minimap2, GATK, HaplotypeCaller, Mutect, and Mutect2 based pipelines and is deployable on any generic-CPU-based computing system.
homepage: https://www.sentieon.com/
documentation: https://www.sentieon.com/
-
input:
- meta:
type: map
@@ -54,7 +53,6 @@ input:
type: file
description: machine learning model file
pattern: "*.model"
-
output:
- meta:
type: map
@@ -73,6 +71,7 @@ output:
type: file
description: Index of the input VCF file
pattern: "*.{tbi}"
-
authors:
- "@ramprasadn"
+maintainers:
+ - "@ramprasadn"
diff --git a/modules/nf-core/sentieon/dnascope/environment.yml b/modules/nf-core/sentieon/dnascope/environment.yml
new file mode 100644
index 0000000000..2c5b4937bf
--- /dev/null
+++ b/modules/nf-core/sentieon/dnascope/environment.yml
@@ -0,0 +1,5 @@
+name: sentieon_dnascope
+channels:
+ - conda-forge
+ - bioconda
+ - defaults
diff --git a/modules/nf-core/sentieon/dnascope/meta.yml b/modules/nf-core/sentieon/dnascope/meta.yml
index 34e0b97b4c..6b61cee828 100644
--- a/modules/nf-core/sentieon/dnascope/meta.yml
+++ b/modules/nf-core/sentieon/dnascope/meta.yml
@@ -114,6 +114,7 @@ output:
type: file
description: File containing software versions
pattern: "versions.yml"
-
authors:
- "@ramprasadn"
+maintainers:
+ - "@ramprasadn"
diff --git a/modules/nf-core/sentieon/gvcftyper/environment.yml b/modules/nf-core/sentieon/gvcftyper/environment.yml
new file mode 100644
index 0000000000..5af2aaa8e8
--- /dev/null
+++ b/modules/nf-core/sentieon/gvcftyper/environment.yml
@@ -0,0 +1,5 @@
+name: sentieon_gvcftyper
+channels:
+ - conda-forge
+ - bioconda
+ - defaults
diff --git a/modules/nf-core/sentieon/gvcftyper/meta.yml b/modules/nf-core/sentieon/gvcftyper/meta.yml
index c373b2c5cc..5a83eb0308 100644
--- a/modules/nf-core/sentieon/gvcftyper/meta.yml
+++ b/modules/nf-core/sentieon/gvcftyper/meta.yml
@@ -1,4 +1,4 @@
-name: SENTIEON_GVCFTYPER
+name: sentieon_gvcftyper
description: |
Perform joint genotyping on one or more samples pre-called with Sentieon's Haplotyper.
keywords:
@@ -47,7 +47,6 @@ input:
type: file
description: dbSNP VCF index file
pattern: "*.tbi"
-
output:
- meta:
type: map
@@ -68,3 +67,5 @@ output:
pattern: "versions.yml"
authors:
- "@asp8200"
+maintainers:
+ - "@asp8200"
diff --git a/modules/nf-core/sentieon/haplotyper/environment.yml b/modules/nf-core/sentieon/haplotyper/environment.yml
new file mode 100644
index 0000000000..5d8e4c58f9
--- /dev/null
+++ b/modules/nf-core/sentieon/haplotyper/environment.yml
@@ -0,0 +1,5 @@
+name: sentieon_haplotyper
+channels:
+ - conda-forge
+ - bioconda
+ - defaults
diff --git a/modules/nf-core/sentieon/haplotyper/meta.yml b/modules/nf-core/sentieon/haplotyper/meta.yml
index 33217808f8..c248db3fca 100644
--- a/modules/nf-core/sentieon/haplotyper/meta.yml
+++ b/modules/nf-core/sentieon/haplotyper/meta.yml
@@ -80,3 +80,5 @@ output:
pattern: "versions.yml"
authors:
- "@asp8200"
+maintainers:
+ - "@asp8200"
diff --git a/modules/nf-core/sentieon/varcal/environment.yml b/modules/nf-core/sentieon/varcal/environment.yml
new file mode 100644
index 0000000000..f04a8b78ce
--- /dev/null
+++ b/modules/nf-core/sentieon/varcal/environment.yml
@@ -0,0 +1,5 @@
+name: sentieon_varcal
+channels:
+ - conda-forge
+ - bioconda
+ - defaults
diff --git a/modules/nf-core/sentieon/varcal/meta.yml b/modules/nf-core/sentieon/varcal/meta.yml
index 5de37f6ad1..cad7ee106f 100644
--- a/modules/nf-core/sentieon/varcal/meta.yml
+++ b/modules/nf-core/sentieon/varcal/meta.yml
@@ -70,3 +70,5 @@ output:
pattern: "*.versions.yml"
authors:
- "@asp8200"
+maintainers:
+ - "@asp8200"
diff --git a/modules/nf-core/snpeff/download/environment.yml b/modules/nf-core/snpeff/download/environment.yml
new file mode 100644
index 0000000000..62f3d5aad6
--- /dev/null
+++ b/modules/nf-core/snpeff/download/environment.yml
@@ -0,0 +1,7 @@
+name: snpeff_download
+channels:
+ - conda-forge
+ - bioconda
+ - defaults
+dependencies:
+ - bioconda::snpeff=5.1
diff --git a/modules/nf-core/snpeff/download/main.nf b/modules/nf-core/snpeff/download/main.nf
index 9a3a0d3167..f1fc4cc395 100644
--- a/modules/nf-core/snpeff/download/main.nf
+++ b/modules/nf-core/snpeff/download/main.nf
@@ -2,7 +2,7 @@ process SNPEFF_DOWNLOAD {
tag "$meta.id"
label 'process_medium'
- conda "bioconda::snpeff=5.1"
+ conda "${moduleDir}/environment.yml"
container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
'https://depot.galaxyproject.org/singularity/snpeff:5.1--hdfd78af_2' :
'biocontainers/snpeff:5.1--hdfd78af_2' }"
diff --git a/modules/nf-core/snpeff/download/meta.yml b/modules/nf-core/snpeff/download/meta.yml
index 3c03c2f602..f367c69664 100644
--- a/modules/nf-core/snpeff/download/meta.yml
+++ b/modules/nf-core/snpeff/download/meta.yml
@@ -1,4 +1,4 @@
-name: SNPEFF_DOWNLOAD
+name: snpeff_download
description: Genetic variant annotation and functional effect prediction toolbox
keywords:
- annotation
@@ -39,3 +39,5 @@ output:
pattern: "versions.yml"
authors:
- "@maxulysse"
+maintainers:
+ - "@maxulysse"
diff --git a/modules/nf-core/snpeff/snpeff/environment.yml b/modules/nf-core/snpeff/snpeff/environment.yml
new file mode 100644
index 0000000000..b492e6a88e
--- /dev/null
+++ b/modules/nf-core/snpeff/snpeff/environment.yml
@@ -0,0 +1,7 @@
+name: snpeff_snpeff
+channels:
+ - conda-forge
+ - bioconda
+ - defaults
+dependencies:
+ - bioconda::snpeff=5.1
diff --git a/modules/nf-core/snpeff/snpeff/main.nf b/modules/nf-core/snpeff/snpeff/main.nf
index e92c1597e7..cc4f2ccb36 100644
--- a/modules/nf-core/snpeff/snpeff/main.nf
+++ b/modules/nf-core/snpeff/snpeff/main.nf
@@ -2,7 +2,7 @@ process SNPEFF_SNPEFF {
tag "$meta.id"
label 'process_medium'
- conda "bioconda::snpeff=5.1"
+ conda "${moduleDir}/environment.yml"
container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
'https://depot.galaxyproject.org/singularity/snpeff:5.1--hdfd78af_2' :
'biocontainers/snpeff:5.1--hdfd78af_2' }"
diff --git a/modules/nf-core/snpeff/snpeff/meta.yml b/modules/nf-core/snpeff/snpeff/meta.yml
index 44bada2303..7559c3de08 100644
--- a/modules/nf-core/snpeff/snpeff/meta.yml
+++ b/modules/nf-core/snpeff/snpeff/meta.yml
@@ -1,4 +1,4 @@
-name: SNPEFF_SNPEFF
+name: snpeff_snpeff
description: Genetic variant annotation and functional effect prediction toolbox
keywords:
- annotation
@@ -56,3 +56,5 @@ output:
pattern: "versions.yml"
authors:
- "@maxulysse"
+maintainers:
+ - "@maxulysse"
diff --git a/modules/nf-core/strelka/germline/environment.yml b/modules/nf-core/strelka/germline/environment.yml
new file mode 100644
index 0000000000..23bd165b21
--- /dev/null
+++ b/modules/nf-core/strelka/germline/environment.yml
@@ -0,0 +1,7 @@
+name: strelka_germline
+channels:
+ - conda-forge
+ - bioconda
+ - defaults
+dependencies:
+ - bioconda::strelka=2.9.10
diff --git a/modules/nf-core/strelka/germline/main.nf b/modules/nf-core/strelka/germline/main.nf
index 74712df16b..8f93356160 100644
--- a/modules/nf-core/strelka/germline/main.nf
+++ b/modules/nf-core/strelka/germline/main.nf
@@ -3,7 +3,7 @@ process STRELKA_GERMLINE {
label 'process_medium'
label 'error_retry'
- conda "bioconda::strelka=2.9.10"
+ conda "${moduleDir}/environment.yml"
container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
'https://depot.galaxyproject.org/singularity/strelka:2.9.10--h9ee0642_1' :
'biocontainers/strelka:2.9.10--h9ee0642_1' }"
diff --git a/modules/nf-core/strelka/germline/meta.yml b/modules/nf-core/strelka/germline/meta.yml
index c119532d16..6ee656683e 100644
--- a/modules/nf-core/strelka/germline/meta.yml
+++ b/modules/nf-core/strelka/germline/meta.yml
@@ -14,7 +14,6 @@ tools:
tool_dev_url: https://github.com/Illumina/strelka
doi: 10.1038/s41592-018-0051-x
licence: ["GPL v3"]
-
input:
- meta:
type: map
@@ -61,3 +60,5 @@ output:
pattern: "versions.yml"
authors:
- "@arontommi"
+maintainers:
+ - "@arontommi"
diff --git a/modules/nf-core/strelka/somatic/environment.yml b/modules/nf-core/strelka/somatic/environment.yml
new file mode 100644
index 0000000000..ecbc865ec9
--- /dev/null
+++ b/modules/nf-core/strelka/somatic/environment.yml
@@ -0,0 +1,7 @@
+name: strelka_somatic
+channels:
+ - conda-forge
+ - bioconda
+ - defaults
+dependencies:
+ - bioconda::strelka=2.9.10
diff --git a/modules/nf-core/strelka/somatic/main.nf b/modules/nf-core/strelka/somatic/main.nf
index 17d700c136..dd975bd563 100644
--- a/modules/nf-core/strelka/somatic/main.nf
+++ b/modules/nf-core/strelka/somatic/main.nf
@@ -3,7 +3,7 @@ process STRELKA_SOMATIC {
label 'process_medium'
label 'error_retry'
- conda "bioconda::strelka=2.9.10"
+ conda "${moduleDir}/environment.yml"
container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
'https://depot.galaxyproject.org/singularity/strelka:2.9.10--h9ee0642_1' :
'biocontainers/strelka:2.9.10--h9ee0642_1' }"
diff --git a/modules/nf-core/strelka/somatic/meta.yml b/modules/nf-core/strelka/somatic/meta.yml
index b2a2550462..6032cd6d5f 100644
--- a/modules/nf-core/strelka/somatic/meta.yml
+++ b/modules/nf-core/strelka/somatic/meta.yml
@@ -14,7 +14,6 @@ tools:
tool_dev_url: https://github.com/Illumina/strelka
doi: 10.1038/s41592-018-0051-x
licence: ["GPL v3"]
-
input:
- meta:
type: map
@@ -61,7 +60,6 @@ input:
type: file
description: Index for BED file containing target regions for variant calling
pattern: "*.{bed.tbi}"
-
output:
- meta:
type: map
@@ -88,6 +86,7 @@ output:
type: file
description: File containing software versions
pattern: "versions.yml"
-
authors:
- "@drpatelh"
+maintainers:
+ - "@drpatelh"
diff --git a/modules/nf-core/svdb/merge/environment.yml b/modules/nf-core/svdb/merge/environment.yml
new file mode 100644
index 0000000000..4aad50da52
--- /dev/null
+++ b/modules/nf-core/svdb/merge/environment.yml
@@ -0,0 +1,9 @@
+name: svdb_merge
+channels:
+ - conda-forge
+ - bioconda
+ - defaults
+dependencies:
+ - svdb=2.8.1
+ # renovate: datasource=conda depName=bioconda/samtools
+ - samtools=1.16.1
diff --git a/modules/nf-core/svdb/merge/main.nf b/modules/nf-core/svdb/merge/main.nf
index 0bd94499ab..0d9967dda9 100644
--- a/modules/nf-core/svdb/merge/main.nf
+++ b/modules/nf-core/svdb/merge/main.nf
@@ -1,7 +1,7 @@
process SVDB_MERGE {
tag "$meta.id"
label 'process_medium'
- conda "bioconda::svdb=2.8.1 bioconda::samtools=1.16.1"
+ conda "${moduleDir}/environment.yml"
container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
'https://depot.galaxyproject.org/singularity/mulled-v2-c8daa8f9d69d3c5a1a4ff08283a166c18edb0000:af6f8534cd538a85ff43a2eae1b52b143e7abd05-0':
'biocontainers/mulled-v2-c8daa8f9d69d3c5a1a4ff08283a166c18edb0000:af6f8534cd538a85ff43a2eae1b52b143e7abd05-0' }"
diff --git a/modules/nf-core/svdb/merge/meta.yml b/modules/nf-core/svdb/merge/meta.yml
index 92a5a128ea..84265acb84 100644
--- a/modules/nf-core/svdb/merge/meta.yml
+++ b/modules/nf-core/svdb/merge/meta.yml
@@ -39,3 +39,5 @@ output:
pattern: "*_sv_merge.vcf.gz"
authors:
- "@ramprasadn"
+maintainers:
+ - "@ramprasadn"
diff --git a/modules/nf-core/tabix/bgziptabix/environment.yml b/modules/nf-core/tabix/bgziptabix/environment.yml
new file mode 100644
index 0000000000..028461c987
--- /dev/null
+++ b/modules/nf-core/tabix/bgziptabix/environment.yml
@@ -0,0 +1,7 @@
+name: tabix_bgziptabix
+channels:
+ - conda-forge
+ - bioconda
+ - defaults
+dependencies:
+ - bioconda::tabix=1.11
diff --git a/modules/nf-core/tabix/bgziptabix/main.nf b/modules/nf-core/tabix/bgziptabix/main.nf
index d6c5a760f3..f9482690ba 100644
--- a/modules/nf-core/tabix/bgziptabix/main.nf
+++ b/modules/nf-core/tabix/bgziptabix/main.nf
@@ -2,7 +2,7 @@ process TABIX_BGZIPTABIX {
tag "$meta.id"
label 'process_single'
- conda "bioconda::tabix=1.11"
+ conda "${moduleDir}/environment.yml"
container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
'https://depot.galaxyproject.org/singularity/tabix:1.11--hdfd78af_0' :
'biocontainers/tabix:1.11--hdfd78af_0' }"
diff --git a/modules/nf-core/tabix/bgziptabix/meta.yml b/modules/nf-core/tabix/bgziptabix/meta.yml
index 2761e27183..438aba4d18 100644
--- a/modules/nf-core/tabix/bgziptabix/meta.yml
+++ b/modules/nf-core/tabix/bgziptabix/meta.yml
@@ -48,3 +48,6 @@ output:
authors:
- "@maxulysse"
- "@DLBPointon"
+maintainers:
+ - "@maxulysse"
+ - "@DLBPointon"
diff --git a/modules/nf-core/tabix/tabix/environment.yml b/modules/nf-core/tabix/tabix/environment.yml
new file mode 100644
index 0000000000..7167fb87d6
--- /dev/null
+++ b/modules/nf-core/tabix/tabix/environment.yml
@@ -0,0 +1,7 @@
+name: tabix_tabix
+channels:
+ - conda-forge
+ - bioconda
+ - defaults
+dependencies:
+ - bioconda::tabix=1.11
diff --git a/modules/nf-core/tabix/tabix/main.nf b/modules/nf-core/tabix/tabix/main.nf
index 5bf332ef84..c304a8a34b 100644
--- a/modules/nf-core/tabix/tabix/main.nf
+++ b/modules/nf-core/tabix/tabix/main.nf
@@ -2,7 +2,7 @@ process TABIX_TABIX {
tag "$meta.id"
label 'process_single'
- conda "bioconda::tabix=1.11"
+ conda "${moduleDir}/environment.yml"
container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
'https://depot.galaxyproject.org/singularity/tabix:1.11--hdfd78af_0' :
'biocontainers/tabix:1.11--hdfd78af_0' }"
diff --git a/modules/nf-core/tabix/tabix/meta.yml b/modules/nf-core/tabix/tabix/meta.yml
index fcc6e52463..ae5b4f439f 100644
--- a/modules/nf-core/tabix/tabix/meta.yml
+++ b/modules/nf-core/tabix/tabix/meta.yml
@@ -43,3 +43,7 @@ authors:
- "@joseespinosa"
- "@drpatelh"
- "@maxulysse"
+maintainers:
+ - "@joseespinosa"
+ - "@drpatelh"
+ - "@maxulysse"
diff --git a/modules/nf-core/tiddit/sv/environment.yml b/modules/nf-core/tiddit/sv/environment.yml
new file mode 100644
index 0000000000..d0367f1717
--- /dev/null
+++ b/modules/nf-core/tiddit/sv/environment.yml
@@ -0,0 +1,7 @@
+name: tiddit_sv
+channels:
+ - conda-forge
+ - bioconda
+ - defaults
+dependencies:
+ - bioconda::tiddit=3.6.1
diff --git a/modules/nf-core/tiddit/sv/main.nf b/modules/nf-core/tiddit/sv/main.nf
index 67a0670dbc..0f4bc7cb52 100644
--- a/modules/nf-core/tiddit/sv/main.nf
+++ b/modules/nf-core/tiddit/sv/main.nf
@@ -2,7 +2,7 @@ process TIDDIT_SV {
tag "$meta.id"
label 'process_medium'
- conda "bioconda::tiddit=3.6.1"
+ conda "${moduleDir}/environment.yml"
container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
'https://depot.galaxyproject.org/singularity/tiddit:3.6.1--py38h24c8ff8_0' :
'biocontainers/tiddit:3.6.1--py38h24c8ff8_0' }"
diff --git a/modules/nf-core/tiddit/sv/meta.yml b/modules/nf-core/tiddit/sv/meta.yml
index 8b41c69cf5..b13ae5cdcb 100644
--- a/modules/nf-core/tiddit/sv/meta.yml
+++ b/modules/nf-core/tiddit/sv/meta.yml
@@ -53,3 +53,5 @@ output:
pattern: "versions.yml"
authors:
- "@maxulysse"
+maintainers:
+ - "@maxulysse"
diff --git a/modules/nf-core/untar/environment.yml b/modules/nf-core/untar/environment.yml
new file mode 100644
index 0000000000..d6917da326
--- /dev/null
+++ b/modules/nf-core/untar/environment.yml
@@ -0,0 +1,9 @@
+name: untar
+channels:
+ - conda-forge
+ - bioconda
+ - defaults
+dependencies:
+ - conda-forge::sed=4.7
+ - conda-forge::grep=3.11
+ - conda-forge::tar=1.34
diff --git a/modules/nf-core/untar/main.nf b/modules/nf-core/untar/main.nf
index 61461c3917..8a75bb957d 100644
--- a/modules/nf-core/untar/main.nf
+++ b/modules/nf-core/untar/main.nf
@@ -2,7 +2,7 @@ process UNTAR {
tag "$archive"
label 'process_single'
- conda "conda-forge::sed=4.7 conda-forge::grep=3.11 conda-forge::tar=1.34"
+ conda "${moduleDir}/environment.yml"
container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
'https://depot.galaxyproject.org/singularity/ubuntu:20.04' :
'nf-core/ubuntu:20.04' }"
diff --git a/modules/nf-core/untar/meta.yml b/modules/nf-core/untar/meta.yml
index db241a6e5e..a9a2110f55 100644
--- a/modules/nf-core/untar/meta.yml
+++ b/modules/nf-core/untar/meta.yml
@@ -39,3 +39,8 @@ authors:
- "@drpatelh"
- "@matthdsm"
- "@jfy133"
+maintainers:
+ - "@joseespinosa"
+ - "@drpatelh"
+ - "@matthdsm"
+ - "@jfy133"
diff --git a/modules/nf-core/untar/tests/main.nf.test b/modules/nf-core/untar/tests/main.nf.test
new file mode 100644
index 0000000000..d40db13d82
--- /dev/null
+++ b/modules/nf-core/untar/tests/main.nf.test
@@ -0,0 +1,77 @@
+nextflow_process {
+
+ name "Test Process UNTAR"
+ script "../main.nf"
+ process "UNTAR"
+
+ tag "modules"
+ tag "modules_nfcore"
+ tag "untar"
+
+ test("test_untar") {
+
+ when {
+ params {
+ outdir = "$outputDir"
+ }
+ process {
+ """
+ input[0] = [ [], file(params.test_data['sarscov2']['genome']['kraken2_tar_gz'], checkIfExists: true) ]
+ """
+ }
+ }
+
+ then {
+ assertAll (
+ { assert process.success },
+ { assert snapshot(process.out.untar).match("test_untar") },
+ )
+ }
+
+ }
+
+ test("test_untar_different_output_path") {
+
+ when {
+ params {
+ outdir = "$outputDir"
+ }
+ process {
+ """
+ input[0] = [ [], file(params.test_data['homo_sapiens']['illumina']['test_flowcell'], checkIfExists: true) ]
+ """
+ }
+ }
+
+ then {
+ assertAll (
+ { assert process.success },
+ { assert snapshot(process.out.untar).match("test_untar_different_output_path") },
+ )
+ }
+
+ }
+
+ test("test_untar_onlyfiles") {
+
+ when {
+ params {
+ outdir = "$outputDir"
+ }
+ process {
+ """
+ input[0] = [ [], file(params.test_data['generic']['tar']['tar_gz'], checkIfExists: true) ]
+ """
+ }
+ }
+
+ then {
+ assertAll (
+ { assert process.success },
+ { assert snapshot(process.out.untar).match("test_untar_onlyfiles") },
+ )
+ }
+
+ }
+
+}
diff --git a/modules/nf-core/untar/tests/main.nf.test.snap b/modules/nf-core/untar/tests/main.nf.test.snap
new file mode 100644
index 0000000000..146c8678a7
--- /dev/null
+++ b/modules/nf-core/untar/tests/main.nf.test.snap
@@ -0,0 +1,513 @@
+{
+ "test_untar_different_output_path": {
+ "content": [
+ [
+ [
+ [
+
+ ],
+ [
+ [
+ [
+ [
+ [
+ [
+ "s_1_1101.bcl:md5,ad01889e2ff43e2f194224e20bdb600c",
+ "s_1_1101.stats:md5,4bbbf103454b37fbc3138fadf1b4446b"
+ ],
+ [
+ "s_1_1101.bcl:md5,565384bbe67a694dfd690bae6d1d30c2",
+ "s_1_1101.stats:md5,55e5abd8f129ff38ef169873547abdb8"
+ ],
+ [
+ "s_1_1101.bcl:md5,650fa58a630a9148835ba79e323d4237",
+ "s_1_1101.stats:md5,77403669ca1b05340c390dff64425c1e"
+ ],
+ [
+ "s_1_1101.bcl:md5,54471c9e97299cd141e202e204637702",
+ "s_1_1101.stats:md5,67b14c9a89b7f8556674a7524d5cfb2d"
+ ],
+ [
+ "s_1_1101.bcl:md5,74e4f929fc7476c380fd9d741ddb6700",
+ "s_1_1101.stats:md5,5730a4c35463eaa12a06b6758710b98c"
+ ],
+ [
+ "s_1_1101.bcl:md5,c785f472f4350c120c02c888c8189590",
+ "s_1_1101.stats:md5,fee4ec63895ea81007e06ee6a36ba5e0"
+ ],
+ [
+ "s_1_1101.bcl:md5,b7ea50bb25f08d43c301741d77050a9b",
+ "s_1_1101.stats:md5,fa7c68f3122c74d14364e6f7b011af70"
+ ],
+ [
+ "s_1_1101.bcl:md5,9d5087dc4bcae39d66486363d4f68ecf",
+ "s_1_1101.stats:md5,23cdceee4d82c4b8e7c60018b9276ace"
+ ],
+ [
+ "s_1_1101.bcl:md5,581e0c5ee94e8f2de14b2b1d8e777530",
+ "s_1_1101.stats:md5,9a3536d573c97f66bb56b49463612607"
+ ],
+ [
+ "s_1_1101.bcl:md5,296fc026bb34c67bbe2b44845fe0d1de",
+ "s_1_1101.stats:md5,a7f57a7770fb9c5ae2a0fb1ef403ec4f"
+ ],
+ [
+ "s_1_1101.bcl:md5,2a3ca15531556c36d10d132a9e051de8",
+ "s_1_1101.stats:md5,2d0bcdb0a1b51d3d79e415db2ab2d3b1"
+ ],
+ [
+ "s_1_1101.bcl:md5,1150d46a2ccd4ac58aee0585d3e4ffd7",
+ "s_1_1101.stats:md5,2e97550bd5b5864ffd0565bb7a3f6d40"
+ ],
+ [
+ "s_1_1101.bcl:md5,0b85c4b3da0de95e7b862d849c5333ae",
+ "s_1_1101.stats:md5,6eab9746fbeb783b0cd70398f44e0c1a"
+ ],
+ [
+ "s_1_1101.bcl:md5,e0e9c91f4698804d7a6d1058ef68b34f",
+ "s_1_1101.stats:md5,790022cdc7878a02b2ebd166e1ddf0a7"
+ ],
+ [
+ "s_1_1101.bcl:md5,38cd0ad4de359e651c8ac0d5777ea625",
+ "s_1_1101.stats:md5,a1b1d5ea5371d326abb029774483c5e6"
+ ],
+ [
+ "s_1_1101.bcl:md5,b0ddc05c4012ccba24e712a1cfec748f",
+ "s_1_1101.stats:md5,af3d232f839d720f76f40ba06caa2987"
+ ],
+ [
+ "s_1_1101.bcl:md5,af32fcc5dc3b836cf7a5ba3db85a75dd",
+ "s_1_1101.stats:md5,f93f2c09bd4e486c74a5f6e2040f7296"
+ ],
+ [
+ "s_1_1101.bcl:md5,54b7428e037ca87816107647d4a3d9db",
+ "s_1_1101.stats:md5,e5ac77a72cd7bed5e9bf03cccda0e48c"
+ ],
+ [
+ "s_1_1101.bcl:md5,fc8b4eacd493bf3d0b20bc23998dc7ff",
+ "s_1_1101.stats:md5,190315e159e2f4bc4c057ded7470dc52"
+ ],
+ [
+ "s_1_1101.bcl:md5,9484ecffda489927fce424ac6a44fa9d",
+ "s_1_1101.stats:md5,0825feeb457ecc9efcf6f8526ba32311"
+ ],
+ [
+ "s_1_1101.bcl:md5,eec59e21036e31c95ce1e847bfb0a9c4",
+ "s_1_1101.stats:md5,9acc13f63c98e5a8445e7be70d49222b"
+ ],
+ [
+ "s_1_1101.bcl:md5,a9fb24476f87cba4fba68e2b3c3f2c07",
+ "s_1_1101.stats:md5,dc0aa7db9790733291c3e6480ca2a0fc"
+ ],
+ [
+ "s_1_1101.bcl:md5,ed950b3e82c500927c2e236c9df005c6",
+ "s_1_1101.stats:md5,dccb71ec47d1f9d33a192da6d5660a45"
+ ],
+ [
+ "s_1_1101.bcl:md5,b3e992025e995ca56b5ea2820144ef47",
+ "s_1_1101.stats:md5,a6a829bf2cffb26ac5d9dc3012057699"
+ ],
+ [
+ "s_1_1101.bcl:md5,89edc726a5a4e0b4ff8ca3899ed0232b",
+ "s_1_1101.stats:md5,5b9b4fd8110577a59b82d0c419519d29"
+ ],
+ [
+ "s_1_1101.bcl:md5,4dc696149169f232c451225f563cb5cd",
+ "s_1_1101.stats:md5,d3514a71ea3adc60e2943c6b8f6e2598"
+ ],
+ [
+ "s_1_1101.bcl:md5,35b992d0318afb7c825ceaa31b0755e6",
+ "s_1_1101.stats:md5,2826093acc175c16c3795de7c4ca8f07"
+ ],
+ [
+ "s_1_1101.bcl:md5,7bc927f56a362e49c00b5d76ee048901",
+ "s_1_1101.stats:md5,e47d862b795fd6b88a31d7d482ab22f6"
+ ],
+ [
+ "s_1_1101.bcl:md5,84742233ff2a651626fe9036f27f7cb2",
+ "s_1_1101.stats:md5,b78fad11d3c50bc76b722cdc03e3028b"
+ ],
+ [
+ "s_1_1101.bcl:md5,3935341c86263a7938e8c49620ef39f8",
+ "s_1_1101.stats:md5,cc6585b2daac5354073d150874da9704"
+ ],
+ [
+ "s_1_1101.bcl:md5,3627f4fd548bf6e64aaf08fba3a342be",
+ "s_1_1101.stats:md5,120ae4831ae004ff7d16728aef36e82f"
+ ],
+ [
+ "s_1_1101.bcl:md5,07631014bc35124149fabd80ef19f933",
+ "s_1_1101.stats:md5,eadd63d91f47cc6db6b6f0a967a23927"
+ ],
+ [
+ "s_1_1101.bcl:md5,a1149c80415dc2f34d768eeb397c43fb",
+ "s_1_1101.stats:md5,ca89a9def67611a9151c6ce685b7cce1"
+ ],
+ [
+ "s_1_1101.bcl:md5,eb5f71d4741d2f40618756bc72eaf8b4",
+ "s_1_1101.stats:md5,90f48501e735e5915b843478e23d1ae2"
+ ],
+ [
+ "s_1_1101.bcl:md5,9bf270fe3f6add1a591ebc24fff10078",
+ "s_1_1101.stats:md5,a4e429671d4098034293c638aa655e16"
+ ],
+ [
+ "s_1_1101.bcl:md5,219bedcbd24bae54fe4cf05dae05282c",
+ "s_1_1101.stats:md5,dd97525b65b68207137d51fcf19132c7"
+ ],
+ [
+ "s_1_1101.bcl:md5,5163bc00a68fd57ae50cae0b76350892",
+ "s_1_1101.stats:md5,b606a5368eff1f012f3ea5d11ccdf2e0"
+ ],
+ [
+ "s_1_1101.bcl:md5,fc429195a5af59a59e0cc4c48e6c05ea",
+ "s_1_1101.stats:md5,d809aa19698053f90d639da4dcad8008"
+ ],
+ [
+ "s_1_1101.bcl:md5,383340219a1dd77076a092a64a71a7e4",
+ "s_1_1101.stats:md5,b204a5cf256378679ffc906c15cc1bae"
+ ],
+ [
+ "s_1_1101.bcl:md5,0c369540d3e24696cf1f9c55bab69315",
+ "s_1_1101.stats:md5,a2bc69a4031a22ce9621dcc623a0bf4b"
+ ],
+ [
+ "s_1_1101.bcl:md5,3127abc8016ba8eb954f8f8015dff387",
+ "s_1_1101.stats:md5,5deafff31150b7bf757f814e49a53bc2"
+ ],
+ [
+ "s_1_1101.bcl:md5,045f40c82de676bafec3d59f91376a7a",
+ "s_1_1101.stats:md5,890700edc20687c090ef52248c7884b1"
+ ],
+ [
+ "s_1_1101.bcl:md5,78af269aa2b39a1d765703f0a4739a86",
+ "s_1_1101.stats:md5,303cf457aa1543a8208544f694cbc531"
+ ],
+ [
+ "s_1_1101.bcl:md5,0ab8c781959b783b62888e9274364a46",
+ "s_1_1101.stats:md5,2605b0e8322f83aa4d0dae5da4ec7a7a"
+ ],
+ [
+ "s_1_1101.bcl:md5,d0cf823ffe352e8b3f75d589544ab617",
+ "s_1_1101.stats:md5,efa3c0e01e3db71e12fd961cb2d03739"
+ ],
+ [
+ "s_1_1101.bcl:md5,db4ca4ab7a01e03c246f9160c3758d82",
+ "s_1_1101.stats:md5,f61550d9e4a90df6b860e68f41f82f60"
+ ],
+ [
+ "s_1_1101.bcl:md5,1af39a2c7e5ff20ece91cb8160b51d17",
+ "s_1_1101.stats:md5,d0e20879afcaf6dfcd88c73f1c5c78cf"
+ ],
+ [
+ "s_1_1101.bcl:md5,4cf7123bb0fffcd79266df03aef01665",
+ "s_1_1101.stats:md5,29bff4075109a121b087116b58d7e927"
+ ],
+ [
+ "s_1_1101.bcl:md5,aa9980428cb60cd6320f4b48f4dd0d74",
+ "s_1_1101.stats:md5,6b0e20bde93133117a8d1a6df3d6f37b"
+ ],
+ [
+ "s_1_1101.bcl:md5,0f6e440374e15b9b491d52fb83a8adfe",
+ "s_1_1101.stats:md5,55cb5eb0ecdabd23dca39ab8c4607598"
+ ],
+ [
+ "s_1_1101.bcl:md5,2c645d7bdaddaa403f6e304d36df9e4b",
+ "s_1_1101.stats:md5,53acf33d21f832779b400c2447386ce4"
+ ],
+ [
+ "s_1_1101.bcl:md5,3bbf0863b423b770c879203644420206",
+ "s_1_1101.stats:md5,579bdc7293cac8c3d7407249cacf4c25"
+ ],
+ [
+ "s_1_1101.bcl:md5,6658a08409e81d29cfeb2d096b491985",
+ "s_1_1101.stats:md5,bb559ffbea46d612f9933cefa84c4c03"
+ ],
+ [
+ "s_1_1101.bcl:md5,1700d9a13d3d4f7643af2943ef838acb",
+ "s_1_1101.stats:md5,f01cb6050ebfb15da1e0399ebd791eb4"
+ ],
+ [
+ "s_1_1101.bcl:md5,1ac7aa9ffae25eb103f755f33e4a39c6",
+ "s_1_1101.stats:md5,0b9d45d7929ccf336d5e5b95373ed3c2"
+ ],
+ [
+ "s_1_1101.bcl:md5,812a97af2e983a53226e18c75190b06c",
+ "s_1_1101.stats:md5,d2410c7b0e506dab2972e77e2398de1e"
+ ],
+ [
+ "s_1_1101.bcl:md5,c981e8e4dcc434956c2b86159da268bc",
+ "s_1_1101.stats:md5,e9c826e85361ce673f1f248786c9a611"
+ ],
+ [
+ "s_1_1101.bcl:md5,88e09e99a0a4ef3357b203a41b22f77c",
+ "s_1_1101.stats:md5,ef06f2e5ad667bbd383f9ed6a05b7b42"
+ ],
+ [
+ "s_1_1101.bcl:md5,461c8b146fc8a7938be38689978ecd09",
+ "s_1_1101.stats:md5,65115693935da66f9791b27136e22fb0"
+ ],
+ [
+ "s_1_1101.bcl:md5,c7b827df5ce20e0f21916fe60860ca3f",
+ "s_1_1101.stats:md5,87be73613aeb507847f94d3cac5bb30a"
+ ],
+ [
+ "s_1_1101.bcl:md5,7c4cc3dc9c8a1b0f15917b282dfb40ce",
+ "s_1_1101.stats:md5,bdd9181fa89debbfafe7b6ea3e064065"
+ ],
+ [
+ "s_1_1101.bcl:md5,19f4debaf91e118aca8934517179ac33",
+ "s_1_1101.stats:md5,1143082719e136241d21b14a6b19b8a2"
+ ],
+ [
+ "s_1_1101.bcl:md5,38aa256ad2d697d84b0b2c0e876a3eba",
+ "s_1_1101.stats:md5,64dd82f03df23f7f437eede2671ed4fe"
+ ],
+ [
+ "s_1_1101.bcl:md5,b7929970378949571fed922c1b8cab32",
+ "s_1_1101.stats:md5,3d6d7985a41629fe196e4342d7fe36aa"
+ ],
+ [
+ "s_1_1101.bcl:md5,fb2ed0bf6e89d79624ee78754e773491",
+ "s_1_1101.stats:md5,f34940810ff255aee79953496a12716d"
+ ],
+ [
+ "s_1_1101.bcl:md5,4f8a8311f5f9c3a7629c1a973a7b280e",
+ "s_1_1101.stats:md5,4fd7cd28c09f4e152e7c2ad1ab541cd2"
+ ],
+ [
+ "s_1_1101.bcl:md5,9eb46c903d0344e25af51f88cc311d60",
+ "s_1_1101.stats:md5,df3abd5f620d9e7f99496098d9fd3f7f"
+ ],
+ [
+ "s_1_1101.bcl:md5,3ecbc17f3660e2014b58d7fe70ae62d5",
+ "s_1_1101.stats:md5,8e89a13c85a6d6ab3ccd251b66d1f165"
+ ],
+ [
+ "s_1_1101.bcl:md5,5d59cc2499a77791233a64f73fe82894",
+ "s_1_1101.stats:md5,32ec99cd400f4b80cb26e2fa8e07ece0"
+ ],
+ [
+ "s_1_1101.bcl:md5,1c052da47b9ae8554388f0fa3aade482",
+ "s_1_1101.stats:md5,d23f438772673688aa7bc92421dc6dce"
+ ],
+ [
+ "s_1_1101.bcl:md5,1a52bd4f23130c0c96bc967ccd448a2b",
+ "s_1_1101.stats:md5,9b597e3388d59ef1f61aba30ac90ea79"
+ ],
+ [
+ "s_1_1101.bcl:md5,8a1e84b79cf3f80794c20e3a0cc84688",
+ "s_1_1101.stats:md5,9561f7b6ef4b1849afc72b2bb49792bd"
+ ],
+ [
+ "s_1_1101.bcl:md5,75c00111051f3fa95d04286823cb9109",
+ "s_1_1101.stats:md5,1fe786cdf8181767deafbd60b3c76610"
+ ],
+ [
+ "s_1_1101.bcl:md5,529255d8deee0873ed5565e6d1a2ebda",
+ "s_1_1101.stats:md5,3fa7f467e97a75880f32d17b7429d316"
+ ],
+ [
+ "s_1_1101.bcl:md5,ea4d960e3d9355d2149da71b88a21df4",
+ "s_1_1101.stats:md5,2540fe65586e8e800c1ddd8cddd1e8cd"
+ ],
+ [
+ "s_1_1101.bcl:md5,0dfe1fd92a2dce2f23119aa483429744",
+ "s_1_1101.stats:md5,78257b2169fb9f0cf40966e06e847e86"
+ ],
+ [
+ "s_1_1101.bcl:md5,f692ddc9aa3ab849271d07c666d0b3b9",
+ "s_1_1101.stats:md5,aa2ec6a3e3a9c116e34fe74a21e6459e"
+ ],
+ [
+ "s_1_1101.bcl:md5,29cc4c239eae7c871c9a1adf92ebdb98",
+ "s_1_1101.stats:md5,263184813090acd740a5bf25304aed3a"
+ ],
+ [
+ "s_1_1101.bcl:md5,e005af6a84925e326afbfe264241f047",
+ "s_1_1101.stats:md5,b6fb20868eebaffcc19daa694a449795"
+ ],
+ [
+ "s_1_1101.bcl:md5,02f1a699b1ba9967accccf99a7af3d24",
+ "s_1_1101.stats:md5,4f007efacecaf26dc0e0231aede28754"
+ ],
+ [
+ "s_1_1101.bcl:md5,df308c72a2dcc655cd95e98f5457187a",
+ "s_1_1101.stats:md5,130c4b07f4c14030bab012824cbe34da"
+ ],
+ [
+ "s_1_1101.bcl:md5,f3ce10d8d2406b72355023bfa8c96822",
+ "s_1_1101.stats:md5,2638f4db393ed5b699ec2ce59ff0ec19"
+ ],
+ [
+ "s_1_1101.bcl:md5,cc2f6d675ad1593ff96f734b172d249e",
+ "s_1_1101.stats:md5,f5b13f1e1ababc9e1a7a73b0b993cbf1"
+ ],
+ [
+ "s_1_1101.bcl:md5,7938a0b21448305a951b023b1845b3a7",
+ "s_1_1101.stats:md5,fcd57511adabfc3ba1ac045165330006"
+ ],
+ [
+ "s_1_1101.bcl:md5,44879bc6a38df1fee8def61868115041",
+ "s_1_1101.stats:md5,517e20e4b58a8023a37f9af62e0e2036"
+ ],
+ [
+ "s_1_1101.bcl:md5,8749611e62406a7d2f34c610a55e56af",
+ "s_1_1101.stats:md5,8ccf24b3676ef84f2e513be8f2a9f3d1"
+ ],
+ [
+ "s_1_1101.bcl:md5,a9846a037611cda3721958088f714c0e",
+ "s_1_1101.stats:md5,6438fa5a1892f328cab1605a95d80a3b"
+ ],
+ [
+ "s_1_1101.bcl:md5,d6c4a2a726496476eb826532f974ed5f",
+ "s_1_1101.stats:md5,8c2c65b5e8b00dbf61ada65252aeb266"
+ ],
+ [
+ "s_1_1101.bcl:md5,be3dde6cae7dd85855a6bf295ebfacfe",
+ "s_1_1101.stats:md5,93bc13f3b0749b2b8d8bcb0b1199f4f0"
+ ],
+ [
+ "s_1_1101.bcl:md5,7c64514735a6cf1565b60647edd17d20",
+ "s_1_1101.stats:md5,4a0aa6c49b24f876415e5878cef7f805"
+ ],
+ [
+ "s_1_1101.bcl:md5,3983b4043bc9df4b505202a5134ccf03",
+ "s_1_1101.stats:md5,1c9d9a8558adc1279ca27c96bc1b9758"
+ ],
+ [
+ "s_1_1101.bcl:md5,a0b8d77f116ec95975f9253dcb768136",
+ "s_1_1101.stats:md5,c3992b786756e7ec42f65ef4b13b50d4"
+ ],
+ [
+ "s_1_1101.bcl:md5,43c95ba35d06bb7c57fbd16f3d1cfd6c",
+ "s_1_1101.stats:md5,3cb69d04698c39f97f962e5bf1eea7f0"
+ ],
+ [
+ "s_1_1101.bcl:md5,3dbeea0cad7052f19f53ff6f19dd4d90",
+ "s_1_1101.stats:md5,58bbc8254f0f5f4a244531e8e9c12a04"
+ ],
+ [
+ "s_1_1101.bcl:md5,da56d088996376c898d855b6cd0a7dfc",
+ "s_1_1101.stats:md5,9f2d78af6908ce1576b89cdc059844ff"
+ ],
+ [
+ "s_1_1101.bcl:md5,7b641a5565f095e9a6ffcad9e4305033",
+ "s_1_1101.stats:md5,3ada06c59b4fb41b83ab6abd0979e9fc"
+ ],
+ [
+ "s_1_1101.bcl:md5,a3843d397a01d51657825bb652c191e5",
+ "s_1_1101.stats:md5,19341e52a4bfc7d9d48e9d2acc68c519"
+ ],
+ [
+ "s_1_1101.bcl:md5,048e3ebfc8efeb8012def6b741c9060d",
+ "s_1_1101.stats:md5,88bd38deca1e87d700effab1fd099565"
+ ],
+ [
+ "s_1_1101.bcl:md5,b340db0e07e829dd5da22371916a1a9e",
+ "s_1_1101.stats:md5,e44cfaddcc4ffb968e5b1a2f41ac48a5"
+ ],
+ [
+ "s_1_1101.bcl:md5,e6011ec6eabbc2b8792deb283c621ce0",
+ "s_1_1101.stats:md5,090875dcd1a431af24bc631333f089c4"
+ ],
+ [
+ "s_1_1101.bcl:md5,a08f216e3352345031ed100ec4245082",
+ "s_1_1101.stats:md5,97b949ef4b96219e1369f673cf5f8a6c"
+ ],
+ [
+ "s_1_1101.bcl:md5,b43337c76fb037dfcf5f8f7bcb3618e5",
+ "s_1_1101.stats:md5,ddef585805e79951f69d23ab7354f69b"
+ ],
+ [
+ "s_1_1101.bcl:md5,8c61fd004104397b360855e058bbf1bf",
+ "s_1_1101.stats:md5,0f8d253816d594dcfea3ccf48c826401"
+ ],
+ [
+ "s_1_1101.bcl:md5,594d06310d328b188aa0b3edfff22cb2",
+ "s_1_1101.stats:md5,3160bf271b39aeb7590e4fd2984710ba"
+ ],
+ [
+ "s_1_1101.bcl:md5,4c9eada67c9d55437211d83e111961d5",
+ "s_1_1101.stats:md5,2901b46ab16ec4863d30e4c84ec29c97"
+ ],
+ [
+ "s_1_1101.bcl:md5,e03971ae5282f0accc0c1b7374d9ef1b",
+ "s_1_1101.stats:md5,60d2a19ce59bf70a21a28555484cead8"
+ ],
+ [
+ "s_1_1101.bcl:md5,e1c6f7a06e63d149895d3e48e63df155",
+ "s_1_1101.stats:md5,44beb10af847ea3dddaf06dda7031126"
+ ],
+ [
+ "s_1_1101.bcl:md5,960a99bf29a8f9d936e9b8582d46c9c6",
+ "s_1_1101.stats:md5,544cd1a7aaaa841914b40ece43399334"
+ ],
+ [
+ "s_1_1101.bcl:md5,5706679f349fd4a6b6313bc2c41c7a42",
+ "s_1_1101.stats:md5,627eea844b26dae033848c2f9f69177b"
+ ],
+ [
+ "s_1_1101.bcl:md5,21da5abc4b0402bbac14b5ab998b0b4f",
+ "s_1_1101.stats:md5,515bd140b095ad90473ca7a9a69877ab"
+ ],
+ "s_1_1101.control:md5,08a72e2198ae95150718e8adf011d105",
+ "s_1_1101.filter:md5,3a72bc73b323c8cb0ac5bfeb62d98989"
+ ]
+ ],
+ [
+ "s_1_1101.locs:md5,0827ea802e5257cc5b20e757a33d4c98"
+ ],
+ "RTAConfiguration.xml:md5,c7d6e257bc374f142dc64b9d2281d4c9",
+ "config.xml:md5,9a4cc7ec01fefa2f1ce9bcb45bbad6e9"
+ ]
+ ],
+ [
+ "ControlMetricsOut.bin:md5,6d77b38d0793a6e1ce1e85706e488953",
+ "CorrectedIntMetricsOut.bin:md5,2bbf84d3be72734addaa2fe794711434",
+ "ErrorMetricsOut.bin:md5,38c88def138e9bb832539911affdb286",
+ "ExtractionMetricsOut.bin:md5,7497c3178837eea8f09350b5cd252e99",
+ "IndexMetricsOut.bin:md5,d41d8cd98f00b204e9800998ecf8427e",
+ "QMetricsOut.bin:md5,7e9f198d53ebdfbb699a5f94cf1ed51c",
+ "TileMetricsOut.bin:md5,83891751ec1c91a425a524b476b6ca3c"
+ ],
+ "RunInfo.xml:md5,03038959f4dd181c86bc97ae71fe270a"
+ ]
+ ]
+ ]
+ ],
+ "timestamp": "2023-10-18T11:56:39.562418"
+ },
+ "test_untar_onlyfiles": {
+ "content": [
+ [
+ [
+ [
+
+ ],
+ [
+ "hello.txt:md5,e59ff97941044f85df5297e1c302d260"
+ ]
+ ]
+ ]
+ ],
+ "timestamp": "2023-10-18T11:56:46.878844"
+ },
+ "test_untar": {
+ "content": [
+ [
+ [
+ [
+
+ ],
+ [
+ "hash.k2d:md5,8b8598468f54a7087c203ad0190555d9",
+ "opts.k2d:md5,a033d00cf6759407010b21700938f543",
+ "taxo.k2d:md5,094d5891cdccf2f1468088855c214b2c"
+ ]
+ ]
+ ]
+ ],
+ "timestamp": "2023-10-18T11:56:08.16574"
+ }
+}
\ No newline at end of file
diff --git a/modules/nf-core/untar/tests/tags.yml b/modules/nf-core/untar/tests/tags.yml
new file mode 100644
index 0000000000..feb6f15c0c
--- /dev/null
+++ b/modules/nf-core/untar/tests/tags.yml
@@ -0,0 +1,2 @@
+untar:
+ - modules/nf-core/untar/**
diff --git a/modules/nf-core/unzip/environment.yml b/modules/nf-core/unzip/environment.yml
new file mode 100644
index 0000000000..d3a535f170
--- /dev/null
+++ b/modules/nf-core/unzip/environment.yml
@@ -0,0 +1,7 @@
+name: unzip
+channels:
+ - conda-forge
+ - bioconda
+ - defaults
+dependencies:
+ - conda-forge::p7zip=16.02
diff --git a/modules/nf-core/unzip/main.nf b/modules/nf-core/unzip/main.nf
index cf977f1dcb..08cfc3c406 100644
--- a/modules/nf-core/unzip/main.nf
+++ b/modules/nf-core/unzip/main.nf
@@ -2,7 +2,7 @@ process UNZIP {
tag "$archive"
label 'process_single'
- conda "conda-forge::p7zip=16.02"
+ conda "${moduleDir}/environment.yml"
container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
'https://depot.galaxyproject.org/singularity/p7zip:16.02' :
'biocontainers/p7zip:16.02' }"
diff --git a/modules/nf-core/unzip/meta.yml b/modules/nf-core/unzip/meta.yml
index 2910e0fd3f..e8e377e2af 100644
--- a/modules/nf-core/unzip/meta.yml
+++ b/modules/nf-core/unzip/meta.yml
@@ -12,7 +12,6 @@ tools:
documentation: https://sourceforge.net/projects/p7zip/
tool_dev_url: "https://sourceforge.net/projects/p7zip"
licence: ["LGPL-2.1-or-later"]
-
input:
- meta:
type: map
@@ -23,7 +22,6 @@ input:
type: file
description: ZIP file
pattern: "*.zip"
-
output:
- meta:
type: map
@@ -38,6 +36,7 @@ output:
type: file
description: File containing software versions
pattern: "versions.yml"
-
authors:
- "@jfy133"
+maintainers:
+ - "@jfy133"
diff --git a/modules/nf-core/vcftools/environment.yml b/modules/nf-core/vcftools/environment.yml
new file mode 100644
index 0000000000..503449e833
--- /dev/null
+++ b/modules/nf-core/vcftools/environment.yml
@@ -0,0 +1,7 @@
+name: vcftools
+channels:
+ - conda-forge
+ - bioconda
+ - defaults
+dependencies:
+ - bioconda::vcftools=0.1.16
diff --git a/modules/nf-core/vcftools/main.nf b/modules/nf-core/vcftools/main.nf
index cf6d296281..0153a60891 100644
--- a/modules/nf-core/vcftools/main.nf
+++ b/modules/nf-core/vcftools/main.nf
@@ -2,7 +2,7 @@ process VCFTOOLS {
tag "$meta.id"
label 'process_single'
- conda "bioconda::vcftools=0.1.16"
+ conda "${moduleDir}/environment.yml"
container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
'https://depot.galaxyproject.org/singularity/vcftools:0.1.16--he513fc3_4' :
'biocontainers/vcftools:0.1.16--he513fc3_4' }"
diff --git a/modules/nf-core/vcftools/meta.yml b/modules/nf-core/vcftools/meta.yml
index 04b786f0e6..f361db4a8f 100644
--- a/modules/nf-core/vcftools/meta.yml
+++ b/modules/nf-core/vcftools/meta.yml
@@ -9,7 +9,6 @@ tools:
homepage: http://vcftools.sourceforge.net/
documentation: http://vcftools.sourceforge.net/man_latest.html
licence: ["LGPL"]
-
input:
- meta:
type: map
@@ -25,7 +24,6 @@ input:
- diff_variant_file:
type: file
description: secondary variant file which can be used with the 'diff' suite of tools (optional)
-
output:
- meta:
type: map
@@ -288,6 +286,7 @@ output:
type: file
description: Switch errors found between sites (optional)
pattern: "*.diff.switch"
-
authors:
- "@Mark-S-Hill"
+maintainers:
+ - "@Mark-S-Hill"
diff --git a/nextflow.config b/nextflow.config
index 217bbf5842..ea9c34d807 100644
--- a/nextflow.config
+++ b/nextflow.config
@@ -15,7 +15,9 @@ params {
// References
genome = 'GATK.GRCh38'
- igenomes_base = 's3://ngi-igenomes/igenomes'
+ igenomes_base = 's3://ngi-igenomes/igenomes/'
+ snpeff_cache = 's3://annotation-cache/snpeff_cache/'
+ vep_cache = 's3://annotation-cache/vep_cache/'
igenomes_ignore = false
save_reference = false // Built references not saved
build_only_index = false // Only build the reference indexes
@@ -69,32 +71,32 @@ params {
joint_germline = false // g.vcf & joint germline calling are not run by default if HaplotypeCaller is selected
joint_mutect2 = false // if true, enables patient-wise multi-sample somatic variant calling
only_paired_variant_calling = false // if true, skips germline variant calling for normal-paired sample
- sentieon_dnascope_emit_mode = "variant" // default value for Sentieon dnascope
- sentieon_dnascope_model = "s3://ngi-igenomes/igenomes/Homo_sapiens/GATK/GRCh38/Annotation/Sentieon/SentieonDNAscopeModel1.1.model"
- sentieon_dnascope_pcr_indel_model = "CONSERVATIVE"
- sentieon_haplotyper_emit_mode = "variant" // default value for Sentieon haplotyper
+ sentieon_dnascope_emit_mode = 'variant' // default value for Sentieon dnascope
+ sentieon_dnascope_pcr_indel_model = 'CONSERVATIVE'
+ sentieon_haplotyper_emit_mode = 'variant' // default value for Sentieon haplotyper
wes = false // Set to true, if data is exome/targeted sequencing data. Used to use correct models in various variant callers
// Annotation
- dbnsfp = null // No dbnsfp processed file
- dbnsfp_consequence = null // No default consequence for dbnsfp plugin
- dbnsfp_fields = "rs_dbSNP,HGVSc_VEP,HGVSp_VEP,1000Gp3_EAS_AF,1000Gp3_AMR_AF,LRT_score,GERP++_RS,gnomAD_exomes_AF" // Default fields for dbnsfp plugin
- dbnsfp_tbi = null // No dbnsfp processed file index
- outdir_cache = null // No default outdir cache
- snpeff_cache = 's3://annotation-cache/snpeff_cache/'
- spliceai_indel = null // No spliceai_indel file
- spliceai_indel_tbi = null // No spliceai_indel file index
- spliceai_snv = null // No spliceai_snv file
- spliceai_snv_tbi = null // No spliceai_snv file index
- use_annotation_cache_keys = false
- vep_cache = 's3://annotation-cache/vep_cache/'
- vep_custom_args = "--everything --filter_common --per_gene --total_length --offline --format vcf" // Default arguments for VEP
- vep_dbnsfp = null // dbnsfp plugin disabled within VEP
- vep_include_fasta = false // Don't use fasta file for annotation with VEP
- vep_loftee = null // loftee plugin disabled within VEP
- vep_out_format = "vcf"
- vep_spliceai = null // spliceai plugin disabled within VEP
- vep_spliceregion = null // spliceregion plugin disabled within VEP
+ bcftools_annotations = null // No extra annotation file
+ bcftools_annotations_tbi = null // No extra annotation file index
+ bcftools_header_lines = null // No header lines to be added to the VCF file
+ dbnsfp = null // No dbnsfp processed file
+ dbnsfp_consequence = null // No default consequence for dbnsfp plugin
+ dbnsfp_fields = "rs_dbSNP,HGVSc_VEP,HGVSp_VEP,1000Gp3_EAS_AF,1000Gp3_AMR_AF,LRT_score,GERP++_RS,gnomAD_exomes_AF" // Default fields for dbnsfp plugin
+ dbnsfp_tbi = null // No dbnsfp processed file index
+ outdir_cache = null // No default outdir cache
+ spliceai_indel = null // No spliceai_indel file
+ spliceai_indel_tbi = null // No spliceai_indel file index
+ spliceai_snv = null // No spliceai_snv file
+ spliceai_snv_tbi = null // No spliceai_snv file index
+ vep_custom_args = "--everything --filter_common --per_gene --total_length --offline --format vcf" // Default arguments for VEP
+ vep_dbnsfp = null // dbnsfp plugin disabled within VEP
+ vep_include_fasta = false // Don't use fasta file for annotation with VEP
+ vep_loftee = null // loftee plugin disabled within VEP
+ vep_out_format = "vcf"
+ vep_spliceai = null // spliceai plugin disabled within VEP
+ vep_spliceregion = null // spliceregion plugin disabled within VEP
+ vep_version = "110.0-0" // Should be updated when we update VEP, needs this to get full path to some plugins
// MultiQC options
multiqc_config = null
@@ -335,7 +337,11 @@ dag {
}
prov {
enabled = true
- file = "${params.outdir}/pipeline_info/manifest_${trace_timestamp}.bco.json"
+ formats {
+ bco {
+ file = "${params.outdir}/pipeline_info/manifest_${trace_timestamp}.bco.json"
+ }
+ }
}
manifest {
@@ -345,7 +351,7 @@ manifest {
description = """An open-source analysis pipeline to detect germline or somatic variants from whole genome or targeted sequencing"""
mainScript = 'main.nf'
nextflowVersion = '!>=23.04.0'
- version = '3.4dev'
+ version = '3.5dev'
doi = '10.12688/f1000research.16665.2, 10.1101/2023.07.19.549462, 10.5281/zenodo.3476425'
}
@@ -355,7 +361,7 @@ includeConfig 'conf/modules/modules.config'
// Load more modules specific config for DSL2 module specific options
// prepare reference
-includeConfig 'conf/modules/prepare_cache.config'
+includeConfig 'conf/modules/download_cache.config'
includeConfig 'conf/modules/prepare_genome.config'
includeConfig 'conf/modules/prepare_intervals.config'
@@ -369,6 +375,9 @@ includeConfig 'conf/modules/recalibrate.config'
includeConfig 'conf/modules/trimming.config'
includeConfig 'conf/modules/umi.config'
+//ngscheckmate
+includeConfig 'conf/modules/ngscheckmate.config'
+
// variant calling
includeConfig 'conf/modules/ascat.config'
includeConfig 'conf/modules/cnvkit.config'
@@ -388,7 +397,6 @@ includeConfig 'conf/modules/sentieon_haplotyper_joint_germline.config'
includeConfig 'conf/modules/sentieon_tnhaplotyper2.config'
includeConfig 'conf/modules/strelka.config'
includeConfig 'conf/modules/tiddit.config'
-
includeConfig 'conf/modules/post_variant_calling.config'
//annotate
diff --git a/nextflow_schema.json b/nextflow_schema.json
index 15a90d8815..e806139f1d 100644
--- a/nextflow_schema.json
+++ b/nextflow_schema.json
@@ -27,6 +27,7 @@
"input_restart": {
"type": "string",
"description": "Automatic retrieval for restart",
+ "fa_icon": "fas fa-file-csv",
"format": "file-path",
"exists": true,
"mimetype": "text/csv",
@@ -83,7 +84,7 @@
"description": "Path to target bed file in case of whole exome or targeted sequencing or intervals file."
},
"nucleotides_per_second": {
- "type": "number",
+ "type": "integer",
"fa_icon": "fas fa-clock",
"description": "Estimate interval size.",
"help_text": "Intervals are parts of the chopped up genome used to speed up preprocessing and variant calling. See `--intervals` for more info. \n\nChanging this parameter, changes the number of intervals that are grouped and processed together. Bed files from target sequencing can contain thousands or small intervals. Spinning up a new process for each can be quite resource intensive. Instead it can be desired to process small intervals together on larger nodes. \nIn order to make use of this parameter, no runtime estimate can be present in the bed file (column 5). ",
@@ -99,8 +100,8 @@
"type": "string",
"fa_icon": "fas fa-toolbox",
"description": "Tools to use for duplicate marking, variant calling and/or for annotation.",
- "help_text": "Multiple tools separated with commas.\n\n**Variant Calling:**\n\nGermline variant calling can currently be performed with the following variant callers:\n- SNPs/Indels: DeepVariant, FreeBayes, GATK HaplotypeCaller, mpileup, Sentieon Haplotyper, Strelka\n- Structural Variants: Manta, TIDDIT\n- Copy-number: CNVKit\n\nTumor-only somatic variant calling can currently be performed with the following variant callers:\n- SNPs/Indels: FreeBayes, mpileup, Mutect2, Strelka\n- Structural Variants: Manta, TIDDIT\n- Copy-number: CNVKit, ControlFREEC\n\nSomatic variant calling can currently only be performed with the following variant callers:\n- SNPs/Indels: FreeBayes, Mutect2, Strelka2\n- Structural variants: Manta, TIDDIT\n- Copy-Number: ASCAT, CNVKit, Control-FREEC\n- Microsatellite Instability: MSIsensorpro\n\n> **NB** Mutect2 for somatic variant calling cannot be combined with `--no_intervals`\n\n**Annotation:**\n \n- snpEff, VEP, merge (both consecutively).\n\n> **NB** As Sarek will use bgzip and tabix to compress and index VCF files annotated, it expects VCF files to be sorted when starting from `--step annotate`.",
- "pattern": "^((ascat|cnvkit|controlfreec|deepvariant|freebayes|haplotypecaller|manta|merge|mpileup|msisensorpro|mutect2|sentieon_dedup|sentieon_dnascope|sentieon_haplotyper|sentieon_tnhaplotyper2|snpeff|strelka|tiddit|vep)?,?)*(? **NB** Mutect2 for somatic variant calling cannot be combined with `--no_intervals`\n\n**Annotation:**\n \n- snpEff, VEP, merge (both consecutively), and bcftools annotate (needs `--bcftools_annotation`).\n\n> **NB** As Sarek will use bgzip and tabix to compress and index VCF files annotated, it expects VCF files to be sorted when starting from `--step annotate`.",
+ "pattern": "^((ascat|bcfann|cnvkit|controlfreec|deepvariant|freebayes|haplotypecaller|sentieon_dnascope|sentieon_haplotyper|sentieon_tnhaplotyper2|manta|merge|mpileup|msisensorpro|mutect2|ngscheckmate|sentieon_dedup|snpeff|strelka|tiddit|vep)?,?)*(? **NB** PON file should be bgzipped.",
- "hidden": true
- },
- "pon_tbi": {
- "type": "string",
- "fa_icon": "fas fa-file",
- "description": "Index of PON panel-of-normals VCF.",
- "help_text": "If none provided, will be generated automatically from the PON bgzipped VCF file.",
- "hidden": true
- },
"ignore_soft_clipped_bases": {
"type": "boolean",
"fa_icon": "fas fa-ban",
@@ -387,15 +375,17 @@
"sentieon_haplotyper_emit_mode": {
"type": "string",
"default": "variant",
- "fa_icon": "fas fa-toolbox",
"description": "Option for selecting output and emit-mode of Sentieon's Haplotyper.",
+ "fa_icon": "fas fa-toolbox",
"help_text": "The option `--sentieon_haplotyper_emit_mode` can be set to the same string values as the Haplotyper's `--emit_mode`. To output both a vcf and a gvcf, specify both a vcf-option (currently, `all`, `confident` and `variant`) and `gvcf`. For example, to obtain a vcf and gvcf one could set `--sentieon_haplotyper_emit_mode` to `variant, gvcf`.",
+ "hidden": true,
"pattern": "^(all|confident|gvcf|variant|gvcf,all|gvcf,confident|gvcf,variant|all,gvcf|confident,gvcf|variant,gvcf)(? **NB** PON file should be bgzipped.",
+ "hidden": true
+ },
+ "pon_tbi": {
+ "type": "string",
+ "fa_icon": "fas fa-file",
+ "description": "Index of PON panel-of-normals VCF.",
+ "help_text": "If none provided, will be generated automatically from the PON bgzipped VCF file.",
+ "hidden": true
+ },
+ "sentieon_dnascope_model": {
+ "type": "string",
+ "fa_icon": "fas fa-database",
+ "hidden": true,
+ "description": "Machine learning model for Sentieon Dnascope.",
+ "help_text": " It is recommended to use DNAscope with a machine learning model to perform variant calling with higher accuracy by improving the candidate detection and filtering. Sentieon can provide you with a model trained using a subset of the data from the GiAB truth-set found in https://github.com/genome-in-a-bottle. In addition, Sentieon can assist you in the creation of models using your own data, which will calibrate the specifics of your sequencing and bio-informatics processing."
+ },
"snpeff_db": {
"type": "string",
"fa_icon": "fas fa-database",
@@ -745,13 +759,13 @@
"type": "string",
"fa_icon": "fas fa-microscope",
"description": "snpEff genome.",
- "help_text": "If you use AWS iGenomes, this has already been set for you appropriately.\nThis is used to specify the genome when using the container with pre-downloaded cache."
+ "help_text": "If you use AWS iGenomes, this has already been set for you appropriately.\nThis is used to specify the genome when looking for local cache, or cloud based cache."
},
"vep_genome": {
"type": "string",
"fa_icon": "fas fa-microscope",
"description": "VEP genome.",
- "help_text": "If you use AWS iGenomes, this has already been set for you appropriately.\nThis is used to specify the genome when using the container with pre-downloaded cache."
+ "help_text": "If you use AWS iGenomes, this has already been set for you appropriately.\nThis is used to specify the genome when looking for local cache, or cloud based cache."
},
"vep_species": {
"type": "string",
@@ -781,7 +795,7 @@
"type": "boolean",
"fa_icon": "fas fa-download",
"description": "Download annotation cache.",
- "help_text": "Set this parameter, if you wish to download annotation cache."
+ "help_text": "Set this parameter, if you wish to download annotation cache.\nUsing this parameter will download cache even if --snpeff_cache and --vep_cache are provided."
},
"igenomes_base": {
"type": "string",
@@ -795,6 +809,22 @@
"description": "Do not load the iGenomes reference config.",
"fa_icon": "fas fa-ban",
"help_text": "Do not load `igenomes.config` when running the pipeline.\nYou may choose this option if you observe clashes between custom parameters and those supplied in `igenomes.config`.\n\n> **NB** You can then run `Sarek` by specifying at least a FASTA genome file."
+ },
+ "vep_cache": {
+ "type": "string",
+ "format": "directory-path",
+ "fa_icon": "fas fa-cloud-download-alt",
+ "default": "s3://annotation-cache/vep_cache/",
+ "description": "Path to VEP cache.",
+ "help_text": "Path to VEP cache which should contain the relevant species, genome and build directories at the path ${vep_species}/${vep_genome}_${vep_cache_version}"
+ },
+ "snpeff_cache": {
+ "type": "string",
+ "format": "directory-path",
+ "fa_icon": "fas fa-cloud-download-alt",
+ "default": "s3://annotation-cache/snpeff_cache/",
+ "description": "Path to snpEff cache.",
+ "help_text": "Path to snpEff cache which should contain the relevant genome and build directory in the path ${snpeff_species}.${snpeff_version}"
}
},
"help_text": "The pipeline config files come bundled with paths to the Illumina iGenomes reference index files.\nThe configuration is set up to use the AWS-iGenomes resource\ncf https://ewels.github.io/AWS-iGenomes/."
diff --git a/nf-test.config b/nf-test.config
new file mode 100644
index 0000000000..c60f901961
--- /dev/null
+++ b/nf-test.config
@@ -0,0 +1,6 @@
+config {
+ testsDir "."
+ workDir ".nf-test"
+ configFile "conf/test.config"
+ profile "test"
+}
diff --git a/subworkflows/local/bam_applybqsr_spark/main.nf b/subworkflows/local/bam_applybqsr_spark/main.nf
index 99a63e0976..cfb86ef6a4 100644
--- a/subworkflows/local/bam_applybqsr_spark/main.nf
+++ b/subworkflows/local/bam_applybqsr_spark/main.nf
@@ -4,7 +4,7 @@
// For all modules here:
// A when clause condition is defined in the conf/modules.config to determine if the module should be run
-include { GATK4_APPLYBQSR_SPARK } from '../../../modules/nf-core/gatk4/applybqsrspark/main'
+include { GATK4SPARK_APPLYBQSR } from '../../../modules/nf-core/gatk4spark/applybqsr/main'
include { CRAM_MERGE_INDEX_SAMTOOLS } from '../cram_merge_index_samtools/main'
workflow BAM_APPLYBQSR_SPARK {
@@ -24,10 +24,10 @@ workflow BAM_APPLYBQSR_SPARK {
.map{ meta, cram, crai, recal, intervals, num_intervals -> [ meta + [ num_intervals:num_intervals ], cram, crai, recal, intervals ] }
// RUN APPLYBQSR SPARK
- GATK4_APPLYBQSR_SPARK(cram_intervals, fasta, fasta_fai, dict.map{ meta, it -> [ it ] })
+ GATK4SPARK_APPLYBQSR(cram_intervals, fasta, fasta_fai, dict.map{ meta, it -> [ it ] })
// Gather the recalibrated cram files
- cram_to_merge = GATK4_APPLYBQSR_SPARK.out.cram.map{ meta, cram -> [ groupKey(meta, meta.num_intervals), cram ] }.groupTuple()
+ cram_to_merge = GATK4SPARK_APPLYBQSR.out.cram.map{ meta, cram -> [ groupKey(meta, meta.num_intervals), cram ] }.groupTuple()
// Merge and index the recalibrated cram files
CRAM_MERGE_INDEX_SAMTOOLS(cram_to_merge, fasta, fasta_fai)
@@ -37,7 +37,7 @@ workflow BAM_APPLYBQSR_SPARK {
.map{ meta, cram, crai -> [ meta - meta.subMap('num_intervals'), cram, crai ] }
// Gather versions of all tools used
- versions = versions.mix(GATK4_APPLYBQSR_SPARK.out.versions)
+ versions = versions.mix(GATK4SPARK_APPLYBQSR.out.versions)
versions = versions.mix(CRAM_MERGE_INDEX_SAMTOOLS.out.versions)
emit:
diff --git a/subworkflows/local/bam_baserecalibrator_spark/main.nf b/subworkflows/local/bam_baserecalibrator_spark/main.nf
index 04bb491b4c..d6e12c39e0 100644
--- a/subworkflows/local/bam_baserecalibrator_spark/main.nf
+++ b/subworkflows/local/bam_baserecalibrator_spark/main.nf
@@ -4,8 +4,8 @@
// For all modules here:
// A when clause condition is defined in the conf/modules.config to determine if the module should be run
-include { GATK4_BASERECALIBRATOR_SPARK } from '../../../modules/nf-core/gatk4/baserecalibratorspark/main'
-include { GATK4_GATHERBQSRREPORTS } from '../../../modules/nf-core/gatk4/gatherbqsrreports/main'
+include { GATK4SPARK_BASERECALIBRATOR } from '../../../modules/nf-core/gatk4spark/baserecalibrator/main'
+include { GATK4_GATHERBQSRREPORTS } from '../../../modules/nf-core/gatk4/gatherbqsrreports/main'
workflow BAM_BASERECALIBRATOR_SPARK {
take:
@@ -26,10 +26,10 @@ workflow BAM_BASERECALIBRATOR_SPARK {
.map{ meta, cram, crai, intervals, num_intervals -> [ meta + [ num_intervals:num_intervals ], cram, crai, intervals ] }
// RUN BASERECALIBRATOR SPARK
- GATK4_BASERECALIBRATOR_SPARK(cram_intervals, fasta, fasta_fai, dict.map{ meta, it -> [ it ] }, known_sites, known_sites_tbi)
+ GATK4SPARK_BASERECALIBRATOR(cram_intervals, fasta, fasta_fai, dict.map{ meta, it -> [ it ] }, known_sites, known_sites_tbi)
// Figuring out if there is one or more table(s) from the same sample
- table_to_merge = GATK4_BASERECALIBRATOR_SPARK.out.table.map{ meta, table -> [ groupKey(meta, meta.num_intervals), table ] }.groupTuple().branch{
+ table_to_merge = GATK4SPARK_BASERECALIBRATOR.out.table.map{ meta, table -> [ groupKey(meta, meta.num_intervals), table ] }.groupTuple().branch{
// Use meta.num_intervals to assess the number of intervals
single: it[0].num_intervals <= 1
multiple: it[0].num_intervals > 1
@@ -44,7 +44,7 @@ workflow BAM_BASERECALIBRATOR_SPARK {
.map{ meta, table -> [ meta - meta.subMap('num_intervals'), table ] }
// Gather versions of all tools used
- versions = versions.mix(GATK4_BASERECALIBRATOR_SPARK.out.versions)
+ versions = versions.mix(GATK4SPARK_BASERECALIBRATOR.out.versions)
versions = versions.mix(GATK4_GATHERBQSRREPORTS.out.versions)
emit:
diff --git a/subworkflows/local/bam_markduplicates_spark/main.nf b/subworkflows/local/bam_markduplicates_spark/main.nf
index e978bf5a38..8e7d0ee023 100644
--- a/subworkflows/local/bam_markduplicates_spark/main.nf
+++ b/subworkflows/local/bam_markduplicates_spark/main.nf
@@ -4,10 +4,10 @@
// For all modules here:
// A when clause condition is defined in the conf/modules.config to determine if the module should be run
-include { CRAM_QC_MOSDEPTH_SAMTOOLS } from '../cram_qc_mosdepth_samtools/main'
-include { GATK4_ESTIMATELIBRARYCOMPLEXITY } from '../../../modules/nf-core/gatk4/estimatelibrarycomplexity/main'
-include { GATK4_MARKDUPLICATES_SPARK } from '../../../modules/nf-core/gatk4/markduplicatesspark/main'
-include { SAMTOOLS_INDEX as INDEX_MARKDUPLICATES } from '../../../modules/nf-core/samtools/index/main'
+include { CRAM_QC_MOSDEPTH_SAMTOOLS } from '../cram_qc_mosdepth_samtools/main'
+include { GATK4_ESTIMATELIBRARYCOMPLEXITY } from '../../../modules/nf-core/gatk4/estimatelibrarycomplexity/main'
+include { GATK4SPARK_MARKDUPLICATES } from '../../../modules/nf-core/gatk4spark/markduplicates/main'
+include { SAMTOOLS_INDEX as INDEX_MARKDUPLICATES } from '../../../modules/nf-core/samtools/index/main'
workflow BAM_MARKDUPLICATES_SPARK {
take:
@@ -22,13 +22,13 @@ workflow BAM_MARKDUPLICATES_SPARK {
reports = Channel.empty()
// RUN MARKUPDUPLICATES SPARK
- GATK4_MARKDUPLICATES_SPARK(bam, fasta, fasta_fai, dict)
+ GATK4SPARK_MARKDUPLICATES(bam, fasta, fasta_fai, dict)
// Index cram
- INDEX_MARKDUPLICATES(GATK4_MARKDUPLICATES_SPARK.out.output)
+ INDEX_MARKDUPLICATES(GATK4SPARK_MARKDUPLICATES.out.output)
// Join with the crai file
- cram = GATK4_MARKDUPLICATES_SPARK.out.output.join(INDEX_MARKDUPLICATES.out.crai, failOnDuplicate: true, failOnMismatch: true)
+ cram = GATK4SPARK_MARKDUPLICATES.out.output.join(INDEX_MARKDUPLICATES.out.crai, failOnDuplicate: true, failOnMismatch: true)
// QC on CRAM
CRAM_QC_MOSDEPTH_SAMTOOLS(cram, fasta, intervals_bed_combined)
@@ -42,7 +42,7 @@ workflow BAM_MARKDUPLICATES_SPARK {
// Gather versions of all tools used
versions = versions.mix(GATK4_ESTIMATELIBRARYCOMPLEXITY.out.versions)
- versions = versions.mix(GATK4_MARKDUPLICATES_SPARK.out.versions)
+ versions = versions.mix(GATK4SPARK_MARKDUPLICATES.out.versions)
versions = versions.mix(INDEX_MARKDUPLICATES.out.versions)
versions = versions.mix(CRAM_QC_MOSDEPTH_SAMTOOLS.out.versions)
diff --git a/subworkflows/local/bam_variant_calling_cnvkit/main.nf b/subworkflows/local/bam_variant_calling_cnvkit/main.nf
index f90a2d6233..161c2dc21a 100644
--- a/subworkflows/local/bam_variant_calling_cnvkit/main.nf
+++ b/subworkflows/local/bam_variant_calling_cnvkit/main.nf
@@ -5,21 +5,27 @@
// A when clause condition is defined in the conf/modules.config to determine if the module should be run
include { CNVKIT_BATCH } from '../../../modules/nf-core/cnvkit/batch/main'
+include { CNVKIT_GENEMETRICS } from '../../../modules/nf-core/cnvkit/genemetrics/main'
workflow BAM_VARIANT_CALLING_CNVKIT {
take:
- cram // channel: [mandatory] cram
- fasta // channel: [mandatory] fasta
- fasta_fai // channel: [optional] fasta_fai
- targets // channel: [mandatory] bed
- reference // channel: [] cnn
+ cram // channel: [mandatory] meta, cram
+ fasta // channel: [mandatory] meta, fasta
+ fasta_fai // channel: [optional] meta, fasta_fai
+ targets // channel: [mandatory] meta, bed
+ reference // channel: [optional] meta, cnn
main:
+ versions = Channel.empty()
generate_pon = false
CNVKIT_BATCH(cram, fasta, fasta_fai, targets, reference, generate_pon)
- versions = CNVKIT_BATCH.out.versions
+ ch_genemetrics = CNVKIT_BATCH.out.cnr.join(CNVKIT_BATCH.out.cns).map{ meta, cnr, cns -> [meta, cnr, cns[2]]}
+ CNVKIT_GENEMETRICS(ch_genemetrics)
+
+ versions = versions.mix(CNVKIT_BATCH.out.versions)
+ versions = versions.mix(CNVKIT_GENEMETRICS.out.versions)
emit:
versions // channel: [ versions.yml ]
diff --git a/subworkflows/local/bam_variant_calling_germline_all/main.nf b/subworkflows/local/bam_variant_calling_germline_all/main.nf
index 5989023adf..1f751c2263 100644
--- a/subworkflows/local/bam_variant_calling_germline_all/main.nf
+++ b/subworkflows/local/bam_variant_calling_germline_all/main.nf
@@ -2,20 +2,21 @@
// GERMLINE VARIANT CALLING
//
-include { BAM_JOINT_CALLING_GERMLINE_GATK } from '../bam_joint_calling_germline_gatk/main'
-include { BAM_JOINT_CALLING_GERMLINE_SENTIEON } from '../bam_joint_calling_germline_sentieon/main'
-include { BAM_VARIANT_CALLING_CNVKIT } from '../bam_variant_calling_cnvkit/main'
-include { BAM_VARIANT_CALLING_DEEPVARIANT } from '../bam_variant_calling_deepvariant/main'
-include { BAM_VARIANT_CALLING_FREEBAYES } from '../bam_variant_calling_freebayes/main'
-include { BAM_VARIANT_CALLING_GERMLINE_MANTA } from '../bam_variant_calling_germline_manta/main'
-include { BAM_VARIANT_CALLING_HAPLOTYPECALLER } from '../bam_variant_calling_haplotypecaller/main'
-include { BAM_VARIANT_CALLING_SENTIEON_DNASCOPE } from '../bam_variant_calling_sentieon_dnascope/main'
-include { BAM_VARIANT_CALLING_SENTIEON_HAPLOTYPER } from '../bam_variant_calling_sentieon_haplotyper/main'
-include { BAM_VARIANT_CALLING_MPILEUP } from '../bam_variant_calling_mpileup/main'
-include { BAM_VARIANT_CALLING_SINGLE_STRELKA } from '../bam_variant_calling_single_strelka/main'
-include { BAM_VARIANT_CALLING_SINGLE_TIDDIT } from '../bam_variant_calling_single_tiddit/main'
-include { SENTIEON_DNAMODELAPPLY } from '../../../modules/nf-core/sentieon/dnamodelapply/main'
-include { VCF_VARIANT_FILTERING_GATK } from '../vcf_variant_filtering_gatk/main'
+include { BAM_JOINT_CALLING_GERMLINE_GATK } from '../bam_joint_calling_germline_gatk/main'
+include { BAM_JOINT_CALLING_GERMLINE_SENTIEON } from '../bam_joint_calling_germline_sentieon/main'
+include { BAM_VARIANT_CALLING_CNVKIT } from '../bam_variant_calling_cnvkit/main'
+include { BAM_VARIANT_CALLING_DEEPVARIANT } from '../bam_variant_calling_deepvariant/main'
+include { BAM_VARIANT_CALLING_FREEBAYES } from '../bam_variant_calling_freebayes/main'
+include { BAM_VARIANT_CALLING_GERMLINE_MANTA } from '../bam_variant_calling_germline_manta/main'
+include { BAM_VARIANT_CALLING_HAPLOTYPECALLER } from '../bam_variant_calling_haplotypecaller/main'
+include { BAM_VARIANT_CALLING_SENTIEON_DNASCOPE } from '../bam_variant_calling_sentieon_dnascope/main'
+include { BAM_VARIANT_CALLING_SENTIEON_HAPLOTYPER } from '../bam_variant_calling_sentieon_haplotyper/main'
+include { BAM_VARIANT_CALLING_MPILEUP } from '../bam_variant_calling_mpileup/main'
+include { BAM_VARIANT_CALLING_SINGLE_STRELKA } from '../bam_variant_calling_single_strelka/main'
+include { BAM_VARIANT_CALLING_SINGLE_TIDDIT } from '../bam_variant_calling_single_tiddit/main'
+include { SENTIEON_DNAMODELAPPLY } from '../../../modules/nf-core/sentieon/dnamodelapply/main'
+include { VCF_VARIANT_FILTERING_GATK } from '../vcf_variant_filtering_gatk/main'
+include { VCF_VARIANT_FILTERING_GATK as SENTIEON_HAPLOTYPER_VCF_VARIANT_FILTERING_GATK } from '../vcf_variant_filtering_gatk/main'
@@ -83,10 +84,10 @@ workflow BAM_VARIANT_CALLING_GERMLINE_ALL {
BAM_VARIANT_CALLING_CNVKIT(
// Remap channel to match module/subworkflow
cram.map{ meta, cram, crai -> [ meta, [], cram ] },
- fasta,
- fasta_fai,
- intervals_bed_combined,
- []
+ fasta.map{ it -> [[id:it[0].baseName], it] },
+ fasta_fai.map{ it -> [[id:it[0].baseName], it] },
+ intervals_bed_combined.map{ it -> [[id:it[0].baseName], it] },
+ [[id:"null"], []]
)
versions = versions.mix(BAM_VARIANT_CALLING_CNVKIT.out.versions)
}
@@ -181,8 +182,8 @@ workflow BAM_VARIANT_CALLING_GERMLINE_ALL {
if (tools.split(',').contains('manta')) {
BAM_VARIANT_CALLING_GERMLINE_MANTA (
cram,
- fasta,
- fasta_fai,
+ fasta.map{ it -> [ [ id:'fasta' ], it ] },
+ fasta_fai.map{ it -> [ [ id:'fasta_fai' ], it ] },
intervals_bed_gz_tbi_combined
)
@@ -295,7 +296,7 @@ workflow BAM_VARIANT_CALLING_GERMLINE_ALL {
// If single sample track, check if filtering should be done
if (!(skip_tools && skip_tools.split(',').contains('haplotyper_filter'))) {
- VCF_VARIANT_FILTERING_GATK(
+ SENTIEON_HAPLOTYPER_VCF_VARIANT_FILTERING_GATK(
vcf_sentieon_haplotyper.join(vcf_tbi_sentieon_haplotyper, failOnDuplicate: true, failOnMismatch: true),
fasta,
fasta_fai,
@@ -304,9 +305,9 @@ workflow BAM_VARIANT_CALLING_GERMLINE_ALL {
known_sites_indels.concat(known_sites_snps).flatten().unique().collect(),
known_sites_indels_tbi.concat(known_sites_snps_tbi).flatten().unique().collect())
- vcf_sentieon_haplotyper = VCF_VARIANT_FILTERING_GATK.out.filtered_vcf
+ vcf_sentieon_haplotyper = SENTIEON_HAPLOTYPER_VCF_VARIANT_FILTERING_GATK.out.filtered_vcf
- versions = versions.mix(VCF_VARIANT_FILTERING_GATK.out.versions)
+ versions = versions.mix(SENTIEON_HAPLOTYPER_VCF_VARIANT_FILTERING_GATK.out.versions)
}
}
}
diff --git a/subworkflows/local/bam_variant_calling_germline_manta/main.nf b/subworkflows/local/bam_variant_calling_germline_manta/main.nf
index 36cb957d6f..d27a999a68 100644
--- a/subworkflows/local/bam_variant_calling_germline_manta/main.nf
+++ b/subworkflows/local/bam_variant_calling_germline_manta/main.nf
@@ -10,8 +10,8 @@ include { MANTA_GERMLINE } from '../../../modules/nf-core/manta/germline/main'
workflow BAM_VARIANT_CALLING_GERMLINE_MANTA {
take:
cram // channel: [mandatory] [ meta, cram, crai ]
- fasta // channel: [mandatory] [ fasta ]
- fasta_fai // channel: [mandatory] [ fasta_fai ]
+ fasta // channel: [mandatory] [ meta, fasta ]
+ fasta_fai // channel: [mandatory] [ meta, fasta_fai ]
intervals // channel: [mandatory] [ interval.bed.gz, interval.bed.gz.tbi] or [ [], []] if no intervals; intervals file contains all intervals
main:
@@ -25,7 +25,7 @@ workflow BAM_VARIANT_CALLING_GERMLINE_MANTA {
[it[0], it[1], it[2], bed_gz, bed_tbi]
}
- MANTA_GERMLINE(cram_intervals, fasta.map{ fasta -> [ [ id:fasta.baseName ], fasta ] }, fasta_fai.map{ fasta_fai -> [ [ id:fasta_fai.baseName ], fasta_fai ] })
+ MANTA_GERMLINE(cram_intervals, fasta, fasta_fai, [])
small_indels_vcf = MANTA_GERMLINE.out.candidate_small_indels_vcf
sv_vcf = MANTA_GERMLINE.out.candidate_sv_vcf
diff --git a/subworkflows/local/bam_variant_calling_mpileup/main.nf b/subworkflows/local/bam_variant_calling_mpileup/main.nf
index 412ba38619..663ed6a0bc 100644
--- a/subworkflows/local/bam_variant_calling_mpileup/main.nf
+++ b/subworkflows/local/bam_variant_calling_mpileup/main.nf
@@ -4,10 +4,10 @@
// For all modules here:
// A when clause condition is defined in the conf/modules.config to determine if the module should be run
-include { CAT_CAT as CAT_MPILEUP } from '../../../modules/nf-core/cat/cat/main'
-include { BCFTOOLS_MPILEUP } from '../../../modules/nf-core/bcftools/mpileup/main'
-include { SAMTOOLS_MPILEUP } from '../../../modules/nf-core/samtools/mpileup/main'
-include { GATK4_MERGEVCFS as MERGE_BCFTOOLS_MPILEUP } from '../../../modules/nf-core/gatk4/mergevcfs/main'
+include { BCFTOOLS_MPILEUP } from '../../../modules/nf-core/bcftools/mpileup/main'
+include { CAT_CAT as CAT_MPILEUP } from '../../../modules/nf-core/cat/cat/main'
+include { GATK4_MERGEVCFS as MERGE_BCFTOOLS_MPILEUP } from '../../../modules/nf-core/gatk4/mergevcfs/main'
+include { SAMTOOLS_MPILEUP } from '../../../modules/nf-core/samtools/mpileup/main'
workflow BAM_VARIANT_CALLING_MPILEUP {
take:
@@ -26,7 +26,7 @@ workflow BAM_VARIANT_CALLING_MPILEUP {
// Run, if --tools mpileup
keep_bcftools_mpileup = false
- BCFTOOLS_MPILEUP(cram_intervals, fasta, keep_bcftools_mpileup)
+ BCFTOOLS_MPILEUP(cram_intervals, fasta.map{ it -> [[id:it[0].baseName], it] }, keep_bcftools_mpileup)
//Only run, if --tools ControlFreec
SAMTOOLS_MPILEUP(cram_intervals, fasta)
diff --git a/subworkflows/local/bam_variant_calling_somatic_all/main.nf b/subworkflows/local/bam_variant_calling_somatic_all/main.nf
index 672b43c530..68fa768c88 100644
--- a/subworkflows/local/bam_variant_calling_somatic_all/main.nf
+++ b/subworkflows/local/bam_variant_calling_somatic_all/main.nf
@@ -120,10 +120,10 @@ workflow BAM_VARIANT_CALLING_SOMATIC_ALL {
BAM_VARIANT_CALLING_CNVKIT(
// Remap channel to match module/subworkflow
cram.map{ meta, normal_cram, normal_crai, tumor_cram, tumor_crai -> [ meta, tumor_cram, normal_cram ] },
- fasta,
- fasta_fai,
- intervals_bed_combined,
- []
+ fasta.map{ it -> [[id:it[0].baseName], it] },
+ fasta_fai.map{ it -> [[id:it[0].baseName], it] },
+ intervals_bed_combined.map{ it -> [[id:it[0].baseName], it] },
+ [[id:"null"], []]
)
versions = versions.mix(BAM_VARIANT_CALLING_CNVKIT.out.versions)
@@ -176,8 +176,8 @@ workflow BAM_VARIANT_CALLING_SOMATIC_ALL {
if (tools.split(',').contains('manta')) {
BAM_VARIANT_CALLING_SOMATIC_MANTA(
cram,
- fasta,
- fasta_fai,
+ fasta.map{ it -> [ [ id:'fasta' ], it ] },
+ fasta_fai.map{ it -> [ [ id:'fasta_fai' ], it ] },
intervals_bed_gz_tbi_combined
)
diff --git a/subworkflows/local/bam_variant_calling_somatic_manta/main.nf b/subworkflows/local/bam_variant_calling_somatic_manta/main.nf
index 7eb5e6687d..f6720c5406 100644
--- a/subworkflows/local/bam_variant_calling_somatic_manta/main.nf
+++ b/subworkflows/local/bam_variant_calling_somatic_manta/main.nf
@@ -9,8 +9,8 @@ include { MANTA_SOMATIC } from '../../../modules/nf-core/manta/somatic/main'
workflow BAM_VARIANT_CALLING_SOMATIC_MANTA {
take:
cram // channel: [mandatory] [ meta, cram1, crai1, cram2, crai2 ]
- fasta // channel: [mandatory] [ fasta ]
- fasta_fai // channel: [mandatory] [ fasta_fai ]
+ fasta // channel: [mandatory] [ meta, fasta ]
+ fasta_fai // channel: [mandatory] [ meta, fasta_fai ]
intervals // channel: [mandatory] [ interval.bed.gz, interval.bed.gz.tbi ] or [ [], [] ] if no intervals
main:
@@ -24,7 +24,7 @@ workflow BAM_VARIANT_CALLING_SOMATIC_MANTA {
[it[0], it[1], it[2], it[3], it[4], bed_gz, bed_tbi]
}
- MANTA_SOMATIC(cram_intervals, fasta, fasta_fai)
+ MANTA_SOMATIC(cram_intervals, fasta, fasta_fai, [])
candidate_small_indels_vcf = MANTA_SOMATIC.out.candidate_small_indels_vcf
candidate_small_indels_vcf_tbi = MANTA_SOMATIC.out.candidate_small_indels_vcf_tbi
diff --git a/subworkflows/local/bam_variant_calling_somatic_mutect2/main.nf b/subworkflows/local/bam_variant_calling_somatic_mutect2/main.nf
index d9ab08e1ba..3f5b7d53ca 100644
--- a/subworkflows/local/bam_variant_calling_somatic_mutect2/main.nf
+++ b/subworkflows/local/bam_variant_calling_somatic_mutect2/main.nf
@@ -31,7 +31,7 @@ workflow BAM_VARIANT_CALLING_SOMATIC_MUTECT2 {
versions = Channel.empty()
//If no germline resource is provided, then create an empty channel to avoid GetPileupsummaries from being run
- germline_resource_pileup = germline_resource_tbi ? germline_resource : Channel.empty()
+ germline_resource_pileup = (germline_resource && germline_resource_tbi) ? germline_resource : Channel.empty()
germline_resource_pileup_tbi = germline_resource_tbi ?: Channel.empty()
// Combine input and intervals for spread and gather strategy
diff --git a/subworkflows/local/bam_variant_calling_tumor_only_all/main.nf b/subworkflows/local/bam_variant_calling_tumor_only_all/main.nf
index d92b263b8a..31d968a245 100644
--- a/subworkflows/local/bam_variant_calling_tumor_only_all/main.nf
+++ b/subworkflows/local/bam_variant_calling_tumor_only_all/main.nf
@@ -85,10 +85,10 @@ workflow BAM_VARIANT_CALLING_TUMOR_ONLY_ALL {
BAM_VARIANT_CALLING_CNVKIT (
// Remap channel to match module/subworkflow
cram.map{ meta, cram, crai -> [ meta, cram, [] ] },
- fasta,
- fasta_fai,
- [],
- cnvkit_reference
+ fasta.map{ it -> [[id:it[0].baseName], it] },
+ fasta_fai.map{ it -> [[id:it[0].baseName], it] },
+ [[id:"null"], []],
+ cnvkit_reference.map{ it -> [[id:it[0].baseName], it] }
)
versions = versions.mix(BAM_VARIANT_CALLING_CNVKIT.out.versions)
@@ -141,9 +141,8 @@ workflow BAM_VARIANT_CALLING_TUMOR_ONLY_ALL {
BAM_VARIANT_CALLING_TUMOR_ONLY_MANTA(
cram,
// Remap channel to match module/subworkflow
- dict.map{ it -> [ [ id:'dict' ], it ] },
- fasta,
- fasta_fai,
+ fasta.map{ it -> [ [ id:'fasta' ], it ] },
+ fasta_fai.map{ it -> [ [ id:'fasta_fai' ], it ] },
intervals_bed_gz_tbi_combined
)
diff --git a/subworkflows/local/bam_variant_calling_tumor_only_manta/main.nf b/subworkflows/local/bam_variant_calling_tumor_only_manta/main.nf
index 10045c7356..38f5d4366c 100644
--- a/subworkflows/local/bam_variant_calling_tumor_only_manta/main.nf
+++ b/subworkflows/local/bam_variant_calling_tumor_only_manta/main.nf
@@ -10,9 +10,8 @@ include { MANTA_TUMORONLY } from '../../../modules/nf-core/manta/tumoronly/main'
workflow BAM_VARIANT_CALLING_TUMOR_ONLY_MANTA {
take:
cram // channel: [mandatory] [ meta, cram, crai ]
- dict // channel: [optional] [ meta, dict ]
- fasta // channel: [mandatory] [ fasta ]
- fasta_fai // channel: [mandatory] [ fasta_fai ]
+ fasta // channel: [mandatory] [ meta, fasta ]
+ fasta_fai // channel: [mandatory] [ meta, fasta_fai ]
intervals // channel: [mandatory] [ interval.bed.gz, interval.bed.gz.tbi ] or [ [], [] ] if no intervals
main:
@@ -26,7 +25,7 @@ workflow BAM_VARIANT_CALLING_TUMOR_ONLY_MANTA {
[it[0], it[1], it[2], bed_gz, bed_tbi]
}
- MANTA_TUMORONLY(cram_intervals, fasta, fasta_fai)
+ MANTA_TUMORONLY(cram_intervals, fasta, fasta_fai, [])
small_indels_vcf = MANTA_TUMORONLY.out.candidate_small_indels_vcf
candidate_sv_vcf = MANTA_TUMORONLY.out.candidate_sv_vcf
diff --git a/subworkflows/local/cram_sampleqc/main.nf b/subworkflows/local/cram_sampleqc/main.nf
new file mode 100644
index 0000000000..3e3b06f008
--- /dev/null
+++ b/subworkflows/local/cram_sampleqc/main.nf
@@ -0,0 +1,30 @@
+include { BAM_NGSCHECKMATE } from '../../../subworkflows/nf-core/bam_ngscheckmate/main'
+
+workflow CRAM_SAMPLEQC {
+
+ take:
+ ch_cram // channel: [ val(meta), cram, crai ]
+ ngscheckmate_bed // channel: [ ngscheckmate_bed ]
+ fasta // channel: [ fasta ]
+
+ main:
+
+ ch_versions = Channel.empty()
+
+ ch_ngscheckmate_bed = ngscheckmate_bed.map{bed -> [[id: "ngscheckmate"], bed]}
+
+ ch_fasta = fasta.map{fasta -> [[id: "genome"], fasta]}
+
+ BAM_NGSCHECKMATE ( ch_cram.map{meta, cram, crai -> [meta, cram]}, ch_ngscheckmate_bed, ch_fasta)
+ ch_versions = ch_versions.mix(BAM_NGSCHECKMATE.out.versions.first())
+
+ emit:
+ corr_matrix = BAM_NGSCHECKMATE.out.corr_matrix // channel: [ meta, corr_matrix ]
+ matched = BAM_NGSCHECKMATE.out.matched // channel: [ meta, matched ]
+ all = BAM_NGSCHECKMATE.out.all // channel: [ meta, all ]
+ vcf = BAM_NGSCHECKMATE.out.vcf // channel: [ meta, vcf ]
+ pdf = BAM_NGSCHECKMATE.out.pdf // channel: [ meta, pdf ]
+
+ versions = ch_versions // channel: [ versions.yml ]
+}
+
diff --git a/subworkflows/local/prepare_cache/main.nf b/subworkflows/local/download_cache_snpeff_vep/main.nf
similarity index 94%
rename from subworkflows/local/prepare_cache/main.nf
rename to subworkflows/local/download_cache_snpeff_vep/main.nf
index 601a453720..f9f776db7c 100644
--- a/subworkflows/local/prepare_cache/main.nf
+++ b/subworkflows/local/download_cache_snpeff_vep/main.nf
@@ -1,5 +1,5 @@
//
-// PREPARE CACHE
+// DOWNLOAD CACHE SNPEFF VEP
//
// Initialize channels based on params or indices that were just built
@@ -11,7 +11,7 @@
include { ENSEMBLVEP_DOWNLOAD } from '../../../modules/nf-core/ensemblvep/download/main'
include { SNPEFF_DOWNLOAD } from '../../../modules/nf-core/snpeff/download/main'
-workflow PREPARE_CACHE {
+workflow DOWNLOAD_CACHE_SNPEFF_VEP {
take:
ensemblvep_info
snpeff_info
diff --git a/subworkflows/local/initialize_annotation_cache/main.nf b/subworkflows/local/initialize_annotation_cache/main.nf
new file mode 100644
index 0000000000..d2c6fcb7d6
--- /dev/null
+++ b/subworkflows/local/initialize_annotation_cache/main.nf
@@ -0,0 +1,57 @@
+//
+// INITIALIZE ANNOTATION CACHE
+//
+
+// Initialize channels based on params or indices that were just built
+// For all modules here:
+// A when clause condition is defined in the conf/modules.config to determine if the module should be run
+// Condition is based on params.step and params.tools
+// If an extra condition exists, it's specified in comments
+
+workflow INITIALIZE_ANNOTATION_CACHE {
+ take:
+ snpeff_enabled
+ snpeff_cache
+ snpeff_genome
+ snpeff_db
+ vep_enabled
+ vep_cache
+ vep_species
+ vep_cache_version
+ vep_genome
+ help_message
+
+ main:
+ if (snpeff_enabled) {
+ def snpeff_annotation_cache_key = (snpeff_cache == "s3://annotation-cache/snpeff_cache/") ? "${snpeff_genome}.${snpeff_db}/" : ""
+ def snpeff_cache_dir = "${snpeff_annotation_cache_key}${snpeff_genome}.${snpeff_db}"
+ def snpeff_cache_path_full = file("$snpeff_cache/$snpeff_cache_dir", type: 'dir')
+ if ( !snpeff_cache_path_full.exists() || !snpeff_cache_path_full.isDirectory() ) {
+ if (snpeff_cache == "s3://annotation-cache/snpeff_cache/") {
+ error("This path is not available within annotation-cache.\nPlease check https://annotation-cache.github.io/ to create a request for it.")
+ } else {
+ error("Path provided with SnpEff cache is invalid.\nMake sure there is a directory named ${snpeff_cache_dir} in ${snpeff_cache}./n${help_message}")
+ }
+ }
+ snpeff_cache = Channel.fromPath(file("${snpeff_cache}/${snpeff_annotation_cache_key}"), checkIfExists: true).collect()
+ .map{ cache -> [ [ id:"${snpeff_genome}.${snpeff_db}" ], cache ] }
+ } else snpeff_cache = []
+
+ if (vep_enabled) {
+ def vep_annotation_cache_key = (vep_cache == "s3://annotation-cache/vep_cache/") ? "${vep_cache_version}_${vep_genome}/" : ""
+ def vep_cache_dir = "${vep_annotation_cache_key}${vep_species}/${vep_cache_version}_${vep_genome}"
+ def vep_cache_path_full = file("$vep_cache/$vep_cache_dir", type: 'dir')
+ if ( !vep_cache_path_full.exists() || !vep_cache_path_full.isDirectory() ) {
+ if (vep_cache == "s3://annotation-cache/vep_cache/") {
+ error("This path is not available within annotation-cache.\nPlease check https://annotation-cache.github.io/ to create a request for it.")
+ } else {
+ error("Path provided with VEP cache is invalid.\nMake sure there is a directory named ${vep_cache_dir} in ${vep_cache}./n${help_message}")
+ }
+ }
+ ensemblvep_cache = Channel.fromPath(file("${vep_cache}/${vep_annotation_cache_key}"), checkIfExists: true).collect()
+ } else ensemblvep_cache = []
+
+ emit:
+ ensemblvep_cache // channel: [ meta, cache ]
+ snpeff_cache // channel: [ meta, cache ]
+}
diff --git a/subworkflows/local/prepare_genome/main.nf b/subworkflows/local/prepare_genome/main.nf
index 4945282d98..f9b9e62c95 100644
--- a/subworkflows/local/prepare_genome/main.nf
+++ b/subworkflows/local/prepare_genome/main.nf
@@ -8,37 +8,39 @@
// Condition is based on params.step and params.tools
// If an extra condition exists, it's specified in comments
-include { BWA_INDEX as BWAMEM1_INDEX } from '../../../modules/nf-core/bwa/index/main'
-include { BWAMEM2_INDEX } from '../../../modules/nf-core/bwamem2/index/main'
-include { DRAGMAP_HASHTABLE } from '../../../modules/nf-core/dragmap/hashtable/main'
-include { GATK4_CREATESEQUENCEDICTIONARY } from '../../../modules/nf-core/gatk4/createsequencedictionary/main'
-include { MSISENSORPRO_SCAN } from '../../../modules/nf-core/msisensorpro/scan/main'
-include { SAMTOOLS_FAIDX } from '../../../modules/nf-core/samtools/faidx/main'
-include { TABIX_TABIX as TABIX_DBSNP } from '../../../modules/nf-core/tabix/tabix/main'
-include { TABIX_TABIX as TABIX_GERMLINE_RESOURCE } from '../../../modules/nf-core/tabix/tabix/main'
-include { TABIX_TABIX as TABIX_KNOWN_INDELS } from '../../../modules/nf-core/tabix/tabix/main'
-include { TABIX_TABIX as TABIX_KNOWN_SNPS } from '../../../modules/nf-core/tabix/tabix/main'
-include { TABIX_TABIX as TABIX_PON } from '../../../modules/nf-core/tabix/tabix/main'
-include { UNTAR as UNTAR_CHR_DIR } from '../../../modules/nf-core/untar/main'
-include { UNZIP as UNZIP_ALLELES } from '../../../modules/nf-core/unzip/main'
-include { UNZIP as UNZIP_GC } from '../../../modules/nf-core/unzip/main'
-include { UNZIP as UNZIP_LOCI } from '../../../modules/nf-core/unzip/main'
-include { UNZIP as UNZIP_RT } from '../../../modules/nf-core/unzip/main'
+include { BWA_INDEX as BWAMEM1_INDEX } from '../../../modules/nf-core/bwa/index/main'
+include { BWAMEM2_INDEX } from '../../../modules/nf-core/bwamem2/index/main'
+include { DRAGMAP_HASHTABLE } from '../../../modules/nf-core/dragmap/hashtable/main'
+include { GATK4_CREATESEQUENCEDICTIONARY } from '../../../modules/nf-core/gatk4/createsequencedictionary/main'
+include { MSISENSORPRO_SCAN } from '../../../modules/nf-core/msisensorpro/scan/main'
+include { SAMTOOLS_FAIDX } from '../../../modules/nf-core/samtools/faidx/main'
+include { TABIX_TABIX as TABIX_BCFTOOLS_ANNOTATIONS } from '../../../modules/nf-core/tabix/tabix/main'
+include { TABIX_TABIX as TABIX_DBSNP } from '../../../modules/nf-core/tabix/tabix/main'
+include { TABIX_TABIX as TABIX_GERMLINE_RESOURCE } from '../../../modules/nf-core/tabix/tabix/main'
+include { TABIX_TABIX as TABIX_KNOWN_INDELS } from '../../../modules/nf-core/tabix/tabix/main'
+include { TABIX_TABIX as TABIX_KNOWN_SNPS } from '../../../modules/nf-core/tabix/tabix/main'
+include { TABIX_TABIX as TABIX_PON } from '../../../modules/nf-core/tabix/tabix/main'
+include { UNTAR as UNTAR_CHR_DIR } from '../../../modules/nf-core/untar/main'
+include { UNZIP as UNZIP_ALLELES } from '../../../modules/nf-core/unzip/main'
+include { UNZIP as UNZIP_GC } from '../../../modules/nf-core/unzip/main'
+include { UNZIP as UNZIP_LOCI } from '../../../modules/nf-core/unzip/main'
+include { UNZIP as UNZIP_RT } from '../../../modules/nf-core/unzip/main'
workflow PREPARE_GENOME {
take:
- ascat_alleles // channel: [optional] ascat allele files
- ascat_loci // channel: [optional] ascat loci files
- ascat_loci_gc // channel: [optional] ascat gc content file
- ascat_loci_rt // channel: [optional] ascat replictiming file
- chr_dir // channel: [optional] chromosome files
- dbsnp // channel: [optional] dbsnp
- fasta // channel: [mandatory] fasta
- fasta_fai // channel: [optional] fasta_fai
- germline_resource // channel: [optional] germline_resource
- known_indels // channel: [optional] known_indels
- known_snps // channel: [optional] known_snps
- pon // channel: [optional] pon
+ ascat_alleles // channel: [optional] ascat allele files
+ ascat_loci // channel: [optional] ascat loci files
+ ascat_loci_gc // channel: [optional] ascat gc content file
+ ascat_loci_rt // channel: [optional] ascat replication timing file
+ bcftools_annotations // channel: [optional] bcftools annotations file
+ chr_dir // channel: [optional] chromosome files
+ dbsnp // channel: [optional] dbsnp
+ fasta // channel: [mandatory] fasta
+ fasta_fai // channel: [optional] fasta_fai
+ germline_resource // channel: [optional] germline_resource
+ known_indels // channel: [optional] known_indels
+ known_snps // channel: [optional] known_snps
+ pon // channel: [optional] pon
main:
@@ -57,6 +59,7 @@ workflow PREPARE_GENOME {
// written for KNOWN_INDELS, but preemptively applied to the rest
// [ file1, file2 ] becomes [ [ meta1, file1 ], [ meta2, file2 ] ]
// outputs are collected to maintain a single channel for relevant TBI files
+ TABIX_BCFTOOLS_ANNOTATIONS(bcftools_annotations.flatten().map{ it -> [ [ id:it.baseName ], it ] })
TABIX_DBSNP(dbsnp.flatten().map{ it -> [ [ id:it.baseName ], it ] })
TABIX_GERMLINE_RESOURCE(germline_resource.flatten().map{ it -> [ [ id:it.baseName ], it ] })
TABIX_KNOWN_SNPS(known_snps.flatten().map{ it -> [ [ id:it.baseName ], it ] } )
@@ -105,6 +108,7 @@ workflow PREPARE_GENOME {
versions = versions.mix(DRAGMAP_HASHTABLE.out.versions)
versions = versions.mix(GATK4_CREATESEQUENCEDICTIONARY.out.versions)
versions = versions.mix(MSISENSORPRO_SCAN.out.versions)
+ versions = versions.mix(TABIX_BCFTOOLS_ANNOTATIONS.out.versions)
versions = versions.mix(TABIX_DBSNP.out.versions)
versions = versions.mix(TABIX_GERMLINE_RESOURCE.out.versions)
versions = versions.mix(TABIX_KNOWN_SNPS.out.versions)
@@ -112,17 +116,18 @@ workflow PREPARE_GENOME {
versions = versions.mix(TABIX_PON.out.versions)
emit:
- bwa = BWAMEM1_INDEX.out.index.map{ meta, index -> [index] }.collect() // path: bwa/*
- bwamem2 = BWAMEM2_INDEX.out.index.map{ meta, index -> [index] }.collect() // path: bwamem2/*
- hashtable = DRAGMAP_HASHTABLE.out.hashmap.map{ meta, index -> [index] }.collect() // path: dragmap/*
- dbsnp_tbi = TABIX_DBSNP.out.tbi.map{ meta, tbi -> [tbi] }.collect() // path: dbsnb.vcf.gz.tbi
- dict = GATK4_CREATESEQUENCEDICTIONARY.out.dict // path: genome.fasta.dict
- fasta_fai = SAMTOOLS_FAIDX.out.fai.map{ meta, fai -> [fai] } // path: genome.fasta.fai
- germline_resource_tbi = TABIX_GERMLINE_RESOURCE.out.tbi.map{ meta, tbi -> [tbi] }.collect() // path: germline_resource.vcf.gz.tbi
- known_snps_tbi = TABIX_KNOWN_SNPS.out.tbi.map{ meta, tbi -> [tbi] }.collect() // path: {known_indels*}.vcf.gz.tbi
- known_indels_tbi = TABIX_KNOWN_INDELS.out.tbi.map{ meta, tbi -> [tbi] }.collect() // path: {known_indels*}.vcf.gz.tbi
- msisensorpro_scan = MSISENSORPRO_SCAN.out.list.map{ meta, list -> [list] } // path: genome_msi.list
- pon_tbi = TABIX_PON.out.tbi.map{ meta, tbi -> [tbi] }.collect() // path: pon.vcf.gz.tbi
+ bcftools_annotations_tbi = TABIX_BCFTOOLS_ANNOTATIONS.out.tbi.map{ meta, tbi -> [tbi] }.collect() // bcftools_annotations.vcf.gz.tbi
+ bwa = BWAMEM1_INDEX.out.index.map{ meta, index -> [index] }.collect() // path: bwa/*
+ bwamem2 = BWAMEM2_INDEX.out.index.map{ meta, index -> [index] }.collect() // path: bwamem2/*
+ hashtable = DRAGMAP_HASHTABLE.out.hashmap.map{ meta, index -> [index] }.collect() // path: dragmap/*
+ dbsnp_tbi = TABIX_DBSNP.out.tbi.map{ meta, tbi -> [tbi] }.collect() // path: dbsnb.vcf.gz.tbi
+ dict = GATK4_CREATESEQUENCEDICTIONARY.out.dict // path: genome.fasta.dict
+ fasta_fai = SAMTOOLS_FAIDX.out.fai.map{ meta, fai -> [fai] } // path: genome.fasta.fai
+ germline_resource_tbi = TABIX_GERMLINE_RESOURCE.out.tbi.map{ meta, tbi -> [tbi] }.collect() // path: germline_resource.vcf.gz.tbi
+ known_snps_tbi = TABIX_KNOWN_SNPS.out.tbi.map{ meta, tbi -> [tbi] }.collect() // path: {known_indels*}.vcf.gz.tbi
+ known_indels_tbi = TABIX_KNOWN_INDELS.out.tbi.map{ meta, tbi -> [tbi] }.collect() // path: {known_indels*}.vcf.gz.tbi
+ msisensorpro_scan = MSISENSORPRO_SCAN.out.list.map{ meta, list -> [list] } // path: genome_msi.list
+ pon_tbi = TABIX_PON.out.tbi.map{ meta, tbi -> [tbi] }.collect() // path: pon.vcf.gz.tbi
allele_files
chr_files
gc_file
diff --git a/subworkflows/local/samplesheet_to_channel/main.nf b/subworkflows/local/samplesheet_to_channel/main.nf
new file mode 100644
index 0000000000..6784b4616b
--- /dev/null
+++ b/subworkflows/local/samplesheet_to_channel/main.nf
@@ -0,0 +1,296 @@
+workflow SAMPLESHEET_TO_CHANNEL {
+
+ take:
+ ch_from_samplesheet
+
+ main:
+ ch_from_samplesheet.dump(tag:"ch_from_samplesheet")
+ input_sample = ch_from_samplesheet.map{ meta, fastq_1, fastq_2, table, cram, crai, bam, bai, vcf, variantcaller ->
+ // generate patient_sample key to group lanes together
+ [ meta.patient + meta.sample, [meta, fastq_1, fastq_2, table, cram, crai, bam, bai, vcf, variantcaller] ]
+ }.tap{ ch_with_patient_sample } // save the channel
+ .groupTuple() //group by patient_sample to get all lanes
+ .map { patient_sample, ch_items ->
+ // get number of lanes per sample
+ [ patient_sample, ch_items.size() ]
+ }.combine(ch_with_patient_sample, by: 0) // for each entry add numLanes
+ .map { patient_sample, num_lanes, ch_items ->
+ (meta, fastq_1, fastq_2, table, cram, crai, bam, bai, vcf, variantcaller) = ch_items
+ if (meta.lane && fastq_2) {
+ meta = meta + [id: "${meta.sample}-${meta.lane}".toString()]
+ def CN = params.seq_center ? "CN:${params.seq_center}\\t" : ''
+
+ def flowcell = flowcellLaneFromFastq(fastq_1)
+ // Don't use a random element for ID, it breaks resuming
+ def read_group = "\"@RG\\tID:${flowcell}.${meta.sample}.${meta.lane}\\t${CN}PU:${meta.lane}\\tSM:${meta.patient}_${meta.sample}\\tLB:${meta.sample}\\tDS:${params.fasta}\\tPL:${params.seq_platform}\""
+
+ meta = meta - meta.subMap('lane') + [num_lanes: num_lanes.toInteger(), read_group: read_group.toString(), data_type: 'fastq', size: 1]
+
+ if (params.step == 'mapping') return [ meta, [ fastq_1, fastq_2 ] ]
+ else {
+ error("Samplesheet contains fastq files but step is `$params.step`. Please check your samplesheet or adjust the step parameter.\nhttps://nf-co.re/sarek/usage#input-samplesheet-configurations")
+ }
+
+ // start from BAM
+ } else if (meta.lane && bam) {
+ if (params.step != 'mapping' && !bai) {
+ error("BAM index (bai) should be provided.")
+ }
+ meta = meta + [id: "${meta.sample}-${meta.lane}".toString()]
+ def CN = params.seq_center ? "CN:${params.seq_center}\\t" : ''
+ def read_group = "\"@RG\\tID:${meta.sample}_${meta.lane}\\t${CN}PU:${meta.lane}\\tSM:${meta.patient}_${meta.sample}\\tLB:${meta.sample}\\tDS:${params.fasta}\\tPL:${params.seq_platform}\""
+
+ meta = meta - meta.subMap('lane') + [num_lanes: num_lanes.toInteger(), read_group: read_group.toString(), data_type: 'bam', size: 1]
+
+ if (params.step != 'annotate') return [ meta - meta.subMap('lane'), bam, bai ]
+ else {
+ error("Samplesheet contains bam files but step is `annotate`. The pipeline is expecting vcf files for the annotation. Please check your samplesheet or adjust the step parameter.\nhttps://nf-co.re/sarek/usage#input-samplesheet-configurations")
+ }
+
+ // recalibration
+ } else if (table && cram) {
+ meta = meta + [id: meta.sample, data_type: 'cram']
+
+ if (!(params.step == 'mapping' || params.step == 'annotate')) return [ meta - meta.subMap('lane'), cram, crai, table ]
+ else {
+ error("Samplesheet contains cram files but step is `$params.step`. Please check your samplesheet or adjust the step parameter.\nhttps://nf-co.re/sarek/usage#input-samplesheet-configurations")
+ }
+
+ // recalibration when skipping MarkDuplicates
+ } else if (table && bam) {
+ meta = meta + [id: meta.sample, data_type: 'bam']
+
+ if (!(params.step == 'mapping' || params.step == 'annotate')) return [ meta - meta.subMap('lane'), bam, bai, table ]
+ else {
+ error("Samplesheet contains bam files but step is `$params.step`. Please check your samplesheet or adjust the step parameter.\nhttps://nf-co.re/sarek/usage#input-samplesheet-configurations")
+ }
+
+ // prepare_recalibration or variant_calling
+ } else if (cram) {
+ meta = meta + [id: meta.sample, data_type: 'cram']
+
+ if (!(params.step == 'mapping' || params.step == 'annotate')) return [ meta - meta.subMap('lane'), cram, crai ]
+ else {
+ error("Samplesheet contains cram files but step is `$params.step`. Please check your samplesheet or adjust the step parameter.\nhttps://nf-co.re/sarek/usage#input-samplesheet-configurations")
+ }
+
+ // prepare_recalibration when skipping MarkDuplicates or `--step markduplicates`
+ } else if (bam) {
+ meta = meta + [id: meta.sample, data_type: 'bam']
+
+ if (!(params.step == 'mapping' || params.step == 'annotate')) return [ meta - meta.subMap('lane'), bam, bai ]
+ else {
+ error("Samplesheet contains bam files but step is `$params.step`. Please check your samplesheet or adjust the step parameter.\nhttps://nf-co.re/sarek/usage#input-samplesheet-configurations")
+ }
+
+ // annotation
+ } else if (vcf) {
+ meta = meta + [id: meta.sample, data_type: 'vcf', variantcaller: variantcaller ?: '']
+
+ if (params.step == 'annotate') return [ meta - meta.subMap('lane'), vcf ]
+ else {
+ error("Samplesheet contains vcf files but step is `$params.step`. Please check your samplesheet or adjust the step parameter.\nhttps://nf-co.re/sarek/usage#input-samplesheet-configurations")
+ }
+ } else {
+ error("Missing or unknown field in csv file header. Please check your samplesheet")
+ }
+ }
+
+ if (params.step != 'annotate' && params.tools && !params.build_only_index) {
+ // Two checks for ensuring that the pipeline stops with a meaningful error message if
+ // 1. the sample-sheet only contains normal-samples, but some of the requested tools require tumor-samples, and
+ // 2. the sample-sheet only contains tumor-samples, but some of the requested tools require normal-samples.
+ input_sample.filter{ it[0].status == 1 }.ifEmpty{ // In this case, the sample-sheet contains no tumor-samples
+ if (!params.build_only_index) {
+ def tools_tumor = ['ascat', 'controlfreec', 'mutect2', 'msisensorpro']
+ def tools_tumor_asked = []
+ tools_tumor.each{ tool ->
+ if (params.tools.split(',').contains(tool)) tools_tumor_asked.add(tool)
+ }
+ if (!tools_tumor_asked.isEmpty()) {
+ error('The sample-sheet only contains normal-samples, but the following tools, which were requested with "--tools", expect at least one tumor-sample: ' + tools_tumor_asked.join(", "))
+ }
+ }
+ }
+
+ input_sample.filter{ it[0].status == 0 }.ifEmpty{ // In this case, the sample-sheet contains no normal/germline-samples
+ def tools_requiring_normal_samples = ['ascat', 'deepvariant', 'haplotypecaller', 'msisensorpro']
+ def requested_tools_requiring_normal_samples = []
+ tools_requiring_normal_samples.each{ tool_requiring_normal_samples ->
+ if (params.tools.split(',').contains(tool_requiring_normal_samples)) requested_tools_requiring_normal_samples.add(tool_requiring_normal_samples)
+ }
+ if (!requested_tools_requiring_normal_samples.isEmpty()) {
+ error('The sample-sheet only contains tumor-samples, but the following tools, which were requested with "--tools", expect at least one normal-sample: ' + requested_tools_requiring_normal_samples.join(", "))
+ }
+ }
+ }
+
+ // Fails when the intervals file has the wrong extension
+ if (params.wes && params.step != 'annotate') {
+ if (params.intervals && !params.intervals.endsWith("bed")) error("Target file specified with `--intervals` must be in BED format for targeted data")
+ else log.warn("Intervals file was provided without parameter `--wes`: Pipeline will assume this is Whole-Genome-Sequencing data.")
+ } else if (params.intervals && !params.intervals.endsWith("bed") && !params.intervals.endsWith("list")) error("Intervals file must end with .bed, .list, or .interval_list")
+
+ if (params.step == 'mapping' && params.aligner.contains("dragmap") && !(params.skip_tools && params.skip_tools.split(',').contains("baserecalibrator"))) {
+ log.warn("DragMap was specified as aligner. Base recalibration is not contained in --skip_tools. It is recommended to skip baserecalibration when using DragMap\nhttps://gatk.broadinstitute.org/hc/en-us/articles/4407897446939--How-to-Run-germline-single-sample-short-variant-discovery-in-DRAGEN-mode")
+ }
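+ // Illustrative note (not from this change): the warning above is avoided by running
+ // with `--aligner dragmap --skip_tools baserecalibrator`.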
+
+ if (params.step == 'mapping' && params.aligner.contains("sentieon-bwamem") && params.umi_read_structure) {
+ error("Sentieon BWA is currently not compatible with FGBio UMI handeling. Please choose a different aligner.")
+ }
+
+ if (params.tools && params.tools.split(',').contains("sentieon_haplotyper") && params.joint_germline && (!params.sentieon_haplotyper_emit_mode || !(params.sentieon_haplotyper_emit_mode.contains('gvcf')))) {
+ error("When setting the option `--joint_germline` and including `sentieon_haplotyper` among the requested tools, please set `--sentieon_haplotyper_emit_mode` to include `gvcf`.")
+ }
+
+ // Fails or warns when missing files or params for ascat
+ if (params.tools && params.tools.split(',').contains('ascat')) {
+ if (!params.ascat_alleles) {
+ error("No allele files were provided for running ASCAT. Please provide a zip folder with allele files.")
+ }
+ if (!params.ascat_loci) {
+ error("No loci files were provided for running ASCAT. Please provide a zip folder with loci files.")
+ }
+ if (!params.ascat_loci_gc && !params.ascat_loci_rt) {
+ log.warn("No LogRCorrection performed in ASCAT. For LogRCorrection to run, please provide either loci gc files or both loci gc files and loci rt files.")
+ }
+ if (params.wes) {
+ log.warn("Default reference files not suited for running ASCAT on WES data. It's recommended to use the reference files provided here: https://github.com/Wedge-lab/battenberg#required-reference-files")
+ }
+ }
+
+ // Warns when missing files or params for mutect2
+ if (params.tools && params.tools.split(',').contains('mutect2')) {
+ if (!params.pon) {
+ log.warn("No Panel-of-normal was specified for Mutect2.\nIt is highly recommended to use one: https://gatk.broadinstitute.org/hc/en-us/articles/5358911630107-Mutect2\nFor more information on how to create one: https://gatk.broadinstitute.org/hc/en-us/articles/5358921041947-CreateSomaticPanelOfNormals-BETA-")
+ }
+ if (!params.germline_resource) {
+ log.warn("If Mutect2 is specified without a germline resource, no filtering will be done.\nIt is recommended to use one: https://gatk.broadinstitute.org/hc/en-us/articles/5358911630107-Mutect2")
+ }
+ if (params.pon && params.pon.contains("/Homo_sapiens/GATK/GRCh38/Annotation/GATKBundle/1000g_pon.hg38.vcf.gz")) {
+ log.warn("The default Panel-of-Normals provided by GATK is used for Mutect2.\nIt is highly recommended to generate one from normal samples that are technical similar to the tumor ones.\nFor more information: https://gatk.broadinstitute.org/hc/en-us/articles/360035890631-Panel-of-Normals-PON-")
+ }
+ }
+
+ // Fails when missing resources for baserecalibrator
+ // Warns when missing resources for haplotypecaller
+ if (!params.dbsnp && !params.known_indels) {
+ if (params.step in ['mapping', 'markduplicates', 'prepare_recalibration', 'recalibrate'] && (!params.skip_tools || (params.skip_tools && !params.skip_tools.split(',').contains('baserecalibrator')))) {
+ error("Base quality score recalibration requires at least one resource file. Please provide at least one of `--dbsnp` or `--known_indels`\nYou can skip this step in the workflow by adding `--skip_tools baserecalibrator` to the command.")
+ }
+ if (params.tools && (params.tools.split(',').contains('haplotypecaller') || params.tools.split(',').contains('sentieon_haplotyper') || params.tools.split(',').contains('sentieon_dnascope'))) {
+ log.warn "If GATK's Haplotypecaller, Sentieon's Dnascope or Sentieon's Haplotyper is specified, without `--dbsnp` or `--known_indels no filtering will be done. For filtering, please provide at least one of `--dbsnp` or `--known_indels`.\nFor more information see FilterVariantTranches (single-sample, default): https://gatk.broadinstitute.org/hc/en-us/articles/5358928898971-FilterVariantTranches\nFor more information see VariantRecalibration (--joint_germline): https://gatk.broadinstitute.org/hc/en-us/articles/5358906115227-VariantRecalibrator\nFor more information on GATK Best practice germline variant calling: https://gatk.broadinstitute.org/hc/en-us/articles/360035535932-Germline-short-variant-discovery-SNPs-Indels-"
+ }
+ }
+ if (params.joint_germline && (!params.tools || !(params.tools.split(',').contains('haplotypecaller') || params.tools.split(',').contains('sentieon_haplotyper') || params.tools.split(',').contains('sentieon_dnascope')))) {
+ error("The GATK's Haplotypecaller, Sentieon's Dnascope or Sentieon's Haplotyper should be specified as one of the tools when doing joint germline variant calling.) ")
+ }
+
+ if (
+ params.tools &&
+ (
+ params.tools.split(',').contains('haplotypecaller') ||
+ params.tools.split(',').contains('sentieon_haplotyper') ||
+ params.tools.split(',').contains('sentieon_dnascope')
+ ) &&
+ params.joint_germline &&
+ (
+ !params.dbsnp ||
+ !params.known_indels ||
+ !params.known_snps ||
+ params.no_intervals
+ )
+ ) {
+ log.warn("""If GATK's Haplotypecaller, Sentieon's Dnascope and/or Sentieon's Haplotyper is specified, \
+ but without `--dbsnp`, `--known_snps`, `--known_indels` or the associated resource labels (i.e. `known_snps_vqsr`), \
+ no variant recalibration will be done. For recalibration you must provide all of these resources.\nFor more information \
+ see VariantRecalibration: https://gatk.broadinstitute.org/hc/en-us/articles/5358906115227-VariantRecalibrator \n\
+ Joint germline variant calling also requires intervals in order to genotype the samples. \
+ As a result, if `--no_intervals` is set to `true` the joint germline variant calling will not be performed.""")
+ }
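+ // Illustrative example (hypothetical file names): variant recalibration with joint germline
+ // calling needs all three resources plus intervals, e.g.
+ //   --tools haplotypecaller --joint_germline --dbsnp dbsnp.vcf.gz --known_snps known_snps.vcf.gz --known_indels known_indels.vcf.gz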
+
+ if (params.tools &&
+ params.tools.split(',').contains('sentieon_dnascope') &&
+ params.joint_germline &&
+ (
+ !params.sentieon_dnascope_emit_mode ||
+ !params.sentieon_dnascope_emit_mode.split(',').contains('gvcf')
+ )
+ ) {
+ error("When using Sentieon Dnascope for joint-germline variant-calling the option `--sentieon_dnascope_emit_mode` has to include `gvcf`.")
+ }
+
+ if (params.tools &&
+ params.tools.split(',').contains('sentieon_haplotyper') &&
+ params.joint_germline &&
+ (
+ !params.sentieon_haplotyper_emit_mode ||
+ !params.sentieon_haplotyper_emit_mode.split(',').contains('gvcf')
+ )
+ ) {
+ error("When using Sentieon Haplotyper for joint-germline variant-calling the option `--sentieon_haplotyper_emit_mode` has to include `gvcf`.")
+ }
+
+
+ // Fails when --joint_mutect2 is used without enabling mutect2
+ if (params.joint_mutect2 && (!params.tools || !params.tools.split(',').contains('mutect2'))) {
+ error("The mutect2 should be specified as one of the tools when doing joint somatic variant calling with Mutect2. (The mutect2 could be specified by adding `--tools mutect2` to the nextflow command.)")
+ }
+
+ // Fails when missing tools for variant_calling or annotate
+ if ((params.step == 'variant_calling' || params.step == 'annotate') && !params.tools) {
+ error("Please specify at least one tool when using `--step ${params.step}`.\nhttps://nf-co.re/sarek/parameters#tools")
+ }
+
+ // Fails when missing sex information for CNV tools
+ if (params.tools && (params.tools.split(',').contains('ascat') || params.tools.split(',').contains('controlfreec'))) {
+ input_sample.map{
+ if (it[0].sex == 'NA' ) {
+ error("Please specify sex information for each sample in your samplesheet when using '--tools' with 'ascat' or 'controlfreec'.\nhttps://nf-co.re/sarek/usage#input-samplesheet-configurations")
+ }
+ }
+ }
+
+ // Fails when bcftools annotate is used but no files are supplied
+ if (params.tools && params.tools.split(',').contains('bcfann') && !(params.bcftools_annotations && params.bcftools_annotations_tbi && params.bcftools_header_lines)) {
+ error("Please specify --bcftools_annotations, --bcftools_annotations_tbi, and --bcftools_header_lines, when using BCFTools annotations")
+ }
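+ // Illustrative example (hypothetical file names):
+ //   --tools bcfann --bcftools_annotations annotations.vcf.gz --bcftools_annotations_tbi annotations.vcf.gz.tbi --bcftools_header_lines header_lines.txt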
+
+ emit:
+ input_sample
+ }
+
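+// Illustrative sketch (not part of this change): the subworkflow would typically be
+// included and invoked from the main pipeline roughly as follows, assuming a
+// `ch_from_samplesheet` channel built upstream from the validated samplesheet:
+//
+//   include { SAMPLESHEET_TO_CHANNEL } from '../subworkflows/local/samplesheet_to_channel/main'
+//
+//   SAMPLESHEET_TO_CHANNEL(ch_from_samplesheet)
+//   input_sample = SAMPLESHEET_TO_CHANNEL.out.input_sample
+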
+/*
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+ FUNCTIONS
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+*/
+// Parse first line of a FASTQ file, return the flowcell id and lane number.
+def flowcellLaneFromFastq(path) {
+ // expected format:
+ // xx:yy:FLOWCELLID:LANE:... (seven fields)
+ // or
+ // FLOWCELLID:LANE:xx:... (five fields)
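+ // for example (illustrative identifiers):
+ //   CASAVA 1.8+ style: @A00123:45:HXXXXXXXX:1:1101:2345:1000 1:N:0:ACGTACGT
+ //   older style: @FC12345AXX:2:1:1234:5678#0/1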
+ def line
+ path.withInputStream {
+ InputStream gzipStream = new java.util.zip.GZIPInputStream(it)
+ Reader decoder = new InputStreamReader(gzipStream, 'ASCII')
+ BufferedReader buffered = new BufferedReader(decoder)
+ line = buffered.readLine()
+ }
+ assert line.startsWith('@')
+ line = line.substring(1)
+ def fields = line.split(':')
+ String fcid
+
+ if (fields.size() >= 7) {
+ // CASAVA 1.8+ format, from https://support.illumina.com/help/BaseSpace_OLH_009008/Content/Source/Informatics/BS/FileFormat_FASTQ-files_swBS.htm
+ // "@File type Conventional base calls