Skip to content

Commit

Permalink
Merge pull request #1 from jts/fix_dsl2_syntax
Browse files Browse the repository at this point in the history
updated code to support nextflow 22.04.5, fixed dsl2 syntax
  • Loading branch information
rdeborja authored Oct 25, 2022
2 parents b6d4d46 + eb2fa7c commit 6ecf07b
Show file tree
Hide file tree
Showing 9 changed files with 40 additions and 40 deletions.
4 changes: 2 additions & 2 deletions main.nf
Original file line number Diff line number Diff line change
@@ -1,10 +1,10 @@
#!/usr/bin/env nextflow

// enable dsl2
nextflow.preview.dsl = 2
nextflow.enable.dsl=2

// include modules
include printHelp from './modules/help.nf'
include { printHelp } from './modules/help.nf'

// import subworkflows
include {articNcovNanopore} from './workflows/articNcovNanopore.nf'
Expand Down
16 changes: 8 additions & 8 deletions modules/artic.nf
Original file line number Diff line number Diff line change
Expand Up @@ -54,9 +54,9 @@ process articMinIONMedaka {
output:
file("${sampleName}.*")

tuple sampleName, file("${sampleName}.primertrimmed.rg.sorted.bam"), emit: ptrim
tuple sampleName, file("${sampleName}.sorted.bam"), emit: mapped
tuple sampleName, file("${sampleName}.consensus.fasta"), emit: consensus_fasta
tuple val(sampleName), file("${sampleName}.primertrimmed.rg.sorted.bam"), emit: ptrim
tuple val(sampleName), file("${sampleName}.sorted.bam"), emit: mapped
tuple val(sampleName), file("${sampleName}.consensus.fasta"), emit: consensus_fasta

script:
// Make an identifier from the fastq filename
Expand Down Expand Up @@ -101,9 +101,9 @@ process articMinIONNanopolish {
output:
file("${sampleName}.*")

tuple sampleName, file("${sampleName}.primertrimmed.rg.sorted.bam"), emit: ptrim
tuple sampleName, file("${sampleName}.sorted.bam"), emit: mapped
tuple sampleName, file("${sampleName}.consensus.fasta"), emit: consensus_fasta
tuple val(sampleName), file("${sampleName}.primertrimmed.rg.sorted.bam"), emit: ptrim
tuple val(sampleName), file("${sampleName}.sorted.bam"), emit: mapped
tuple val(sampleName), file("${sampleName}.consensus.fasta"), emit: consensus_fasta

script:
// Make an identifier from the fastq filename
Expand Down Expand Up @@ -142,10 +142,10 @@ process articRemoveUnmappedReads {
cpus 1

input:
tuple(sampleName, path(bamfile))
tuple(val(sampleName), path(bamfile))

output:
tuple( sampleName, file("${sampleName}.mapped.sorted.bam"))
tuple( val(sampleName), file("${sampleName}.mapped.sorted.bam"))

script:
"""
Expand Down
44 changes: 22 additions & 22 deletions modules/illumina.nf
Original file line number Diff line number Diff line change
@@ -1,8 +1,8 @@
process readTrimming {
/**
* Trims paired fastq using trim_galore (https://github.com/FelixKrueger/TrimGalore)
* @input tuple(sampleName, path(forward), path(reverse))
* @output trimgalore_out tuple(sampleName, path("*_val_1.fq.gz"), path("*_val_2.fq.gz"))
* @input tuple(val(sampleName), path(forward), path(reverse))
* @output trimgalore_out tuple(val(sampleName), path("*_val_1.fq.gz"), path("*_val_2.fq.gz"))
*/

tag { sampleName }
Expand All @@ -12,10 +12,10 @@ process readTrimming {
cpus 1

input:
tuple(sampleName, path(forward), path(reverse))
tuple(val(sampleName), path(forward), path(reverse))

output:
tuple(sampleName, path("*_val_1.fq.gz"), path("*_val_2.fq.gz")) optional true
tuple(val(sampleName), path("*_val_1.fq.gz"), path("*_val_2.fq.gz")) optional true

script:
"""
Expand All @@ -30,8 +30,8 @@ process readTrimming {
process filterResidualAdapters {
/**
* Discard reads that contain residual adapter sequences that indicate trimming may have failed
* @input tuple(sampleName, path(forward), path(reverse))
* @output untrim_filter_out tuple(sampleName, path("*_val_1.fq.gz"), path("*_val_2.fq.gz"))
* @input tuple(val(sampleName), path(forward), path(reverse))
* @output untrim_filter_out tuple(val(sampleName), path("*_val_1.fq.gz"), path("*_val_2.fq.gz"))
*/

tag { sampleName }
Expand All @@ -41,10 +41,10 @@ process filterResidualAdapters {
cpus 1

input:
tuple(sampleName, path(forward), path(reverse))
tuple(val(sampleName), path(forward), path(reverse))

output:
tuple(sampleName, path("*1_posttrim_filter.fq.gz"), path("*2_posttrim_filter.fq.gz")) optional true
tuple(val(sampleName), path("*1_posttrim_filter.fq.gz"), path("*2_posttrim_filter.fq.gz")) optional true

script:
"""
Expand Down Expand Up @@ -91,10 +91,10 @@ process readMapping {
publishDir "${params.outdir}/${task.process.replaceAll(":","_")}", pattern: "${sampleName}.sorted.bam", mode: 'copy'

input:
tuple sampleName, path(forward), path(reverse), path(ref), path("*")
tuple val(sampleName), path(forward), path(reverse), path(ref), path("*")

output:
tuple(sampleName, path("${sampleName}.sorted.bam"))
tuple(val(sampleName), path("${sampleName}.sorted.bam"))

script:
"""
Expand All @@ -111,11 +111,11 @@ process trimPrimerSequences {
publishDir "${params.outdir}/${task.process.replaceAll(":","_")}", pattern: "${sampleName}.mapped.primertrimmed.sorted.bam", mode: 'copy'

input:
tuple sampleName, path(bam), path(bedfile)
tuple val(sampleName), path(bam), path(bedfile)

output:
tuple sampleName, path("${sampleName}.mapped.bam"), emit: mapped
tuple sampleName, path("${sampleName}.mapped.primertrimmed.sorted.bam" ), emit: ptrim
tuple val(sampleName), path("${sampleName}.mapped.bam"), emit: mapped
tuple val(sampleName), path("${sampleName}.mapped.primertrimmed.sorted.bam" ), emit: ptrim

script:
if (params.allowNoprimer){
Expand All @@ -138,10 +138,10 @@ process callVariants {
publishDir "${params.outdir}/${task.process.replaceAll(":","_")}", pattern: "${sampleName}.variants.tsv", mode: 'copy'

input:
tuple(sampleName, path(bam), path(ref))
tuple(val(sampleName), path(bam), path(ref))

output:
tuple sampleName, path("${sampleName}.variants.tsv")
tuple val(sampleName), path("${sampleName}.variants.tsv")

script:
"""
Expand All @@ -158,10 +158,10 @@ process makeConsensus {
publishDir "${params.outdir}/${task.process.replaceAll(":","_")}", pattern: "${sampleName}.primertrimmed.consensus.fa", mode: 'copy'

input:
tuple(sampleName, path(bam))
tuple(val(sampleName), path(bam))

output:
tuple(sampleName, path("${sampleName}.primertrimmed.consensus.fa"))
tuple(val(sampleName), path("${sampleName}.primertrimmed.consensus.fa"))

script:
"""
Expand All @@ -179,11 +179,11 @@ process callConsensusFreebayes {
publishDir "${params.outdir}/${task.process.replaceAll(":","_")}", pattern: "${sampleName}.variants.norm.vcf", mode: 'copy'

input:
tuple(sampleName, path(bam), path(ref))
tuple(val(sampleName), path(bam), path(ref))

output:
tuple sampleName, path("${sampleName}.consensus.fasta")
tuple sampleName, path("${sampleName}.variants.norm.vcf")
tuple val(sampleName), path("${sampleName}.consensus.fasta")
tuple val(sampleName), path("${sampleName}.variants.norm.vcf")

script:
"""
Expand Down Expand Up @@ -235,10 +235,10 @@ process cramToFastq {
*/

input:
tuple sampleName, file(cram)
tuple val(sampleName), file(cram)

output:
tuple sampleName, path("${sampleName}_1.fastq.gz"), path("${sampleName}_2.fastq.gz")
tuple val(sampleName), path("${sampleName}_1.fastq.gz"), path("${sampleName}_2.fastq.gz")

script:
"""
Expand Down
6 changes: 3 additions & 3 deletions modules/out.nf
Original file line number Diff line number Diff line change
Expand Up @@ -11,11 +11,11 @@ process bamToCram {
publishDir "${params.outdir}/${task.process.replaceAll(":","_")}", pattern: "${bam.baseName}.cram.crai", mode: 'copy'

input:
tuple(sampleName, path(bam), path(ref))
tuple(val(sampleName), path(bam), path(ref))

output:
tuple sampleName, path("${bam.baseName}.cram"), emit: cramed
tuple sampleName, path("${bam.baseName}.cram.crai"), emit: cramedidx
tuple val(sampleName), path("${bam.baseName}.cram"), emit: cramed
tuple val(sampleName), path("${bam.baseName}.cram.crai"), emit: cramedidx

script:
"""
Expand Down
2 changes: 1 addition & 1 deletion modules/qc.nf
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@ process makeQCCSV {
publishDir "${params.outdir}/qc_plots", pattern: "${sampleName}.depth.png", mode: 'copy'

input:
tuple sampleName, path(bam), path(fasta), path(ref)
tuple val(sampleName), path(bam), path(fasta), path(ref)

output:
path "${params.prefix}.${sampleName}.qc.csv", emit: csv
Expand Down
2 changes: 1 addition & 1 deletion modules/upload.nf
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@ process collateSamples {
publishDir "${params.outdir}/qc_pass_climb_upload/${params.prefix}", pattern: "${sampleName}", mode: 'copy'

input:
tuple(sampleName, path(bam), path(fasta))
tuple(val(sampleName), path(bam), path(fasta))

output:
path("${sampleName}")
Expand Down
2 changes: 1 addition & 1 deletion modules/utils.nf
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@ process performHostFilter {
input:
tuple(val(sampleId), path(forward), path(reverse))
output:
tuple sampleId, path("${sampleId}_hostfiltered_R1.fastq.gz"), path("${sampleId}_hostfiltered_R2.fastq.gz"), emit: fastqPairs
tuple val(sampleId), path("${sampleId}_hostfiltered_R1.fastq.gz"), path("${sampleId}_hostfiltered_R2.fastq.gz"), emit: fastqPairs

script:
"""
Expand Down
2 changes: 1 addition & 1 deletion workflows/articNcovNanopore.nf
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
// ARTIC ncov workflow

// enable dsl2
nextflow.preview.dsl = 2
nextflow.enable.dsl = 2

// import modules
include {articDownloadScheme} from '../modules/artic.nf'
Expand Down
2 changes: 1 addition & 1 deletion workflows/illuminaNcov.nf
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
#!/usr/bin/env nextflow

// enable dsl2
nextflow.preview.dsl = 2
nextflow.enable.dsl = 2

// import modules
include {articDownloadScheme } from '../modules/artic.nf'
Expand Down

0 comments on commit 6ecf07b

Please sign in to comment.