#!/usr/bin/env nextflow
import groovy.yaml.YamlSlurper
nextflow.enable.dsl=2
/*
Developed by the Genome Institute of Singapore for
the National Precision Medicine Programme
Copyright: 2022 Genome Institute of Singapore
License: The MIT License (MIT)
See LICENSE for more copyright information
*/
/*
----------------------------------------------------------------------
FUNCTIONS
----------------------------------------------------------------------
*/
def helpMessage() {
    log.info """
    Usage: nextflow run main.nf -config nextflow.config -params-file params.yaml
                                -work-dir ./ --outdir ./
                                [-resume] [--keep_workdir] [--help]

    Options:
      -config          Generic workflow settings
      -params-file     Sample-specific settings
      -profile         Job launch settings
      -resume          Re-use existing results (optional, omit to re-run from scratch)
      --keep_workdir   Keep work directory (optional, omit for auto-deletion)
      --help           Print this help message
    """.stripIndent()
}
def nextflowMessage() {
    log.info "N E X T F L O W ~ version ${workflow.nextflow.version} ${workflow.nextflow.build}"
}

def version_message() {
    println("NPM-sample-qc ~ version ${workflow.manifest.version}")
}

def minimalInformationMessage() {
    log.info "User name    : " + workflow.userName
    log.info "Command Line : " + workflow.commandLine
    log.info "Project Dir  : " + workflow.projectDir
    log.info "Launch Dir   : " + workflow.launchDir
    log.info "Work Dir     : " + workflow.workDir
    log.info "Results Dir  : " + params.publish_dir
    log.info "Info Dir     : " + params.info_dir
    log.info "Profile      : " + workflow.profile
}

def startMessage() {
    this.nextflowMessage()
    this.version_message()
    this.minimalInformationMessage()
}
/*
----------------------------------------------------------------------
USAGE
----------------------------------------------------------------------
*/
if (params.help) exit 0, helpMessage()
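
// Illustrative launch command (the config file, params file and profile name below are
// placeholders mirroring the usage text above; substitute your own):
//
//   nextflow run main.nf -config nextflow.config -params-file params.yaml \
//       -profile <profile> -work-dir ./ --outdir ./ -resume
//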
/*
----------------------------------------------------------------------
LAUNCH INFO
----------------------------------------------------------------------
*/
startMessage()
/*
----------------------------------------------------------------------
PROCESSES
----------------------------------------------------------------------
*/
include { samtools_stats as samtools_stats_bam } from './modules/samtools'
include { samtools_stats as samtools_stats_cram } from './modules/samtools'
include { verifybamid2 as verifybamid2_bam } from './modules/verifybamid2'
include { verifybamid2 as verifybamid2_cram } from './modules/verifybamid2'
include { picard_collect_multiple_metrics as picard_collect_multiple_metrics_bam } from './modules/CollectMultipleMetrics'
include { picard_collect_multiple_metrics as picard_collect_multiple_metrics_cram } from './modules/CollectMultipleMetrics'
include { picard_collect_wgs_metrics as picard_collect_wgs_metrics_bam } from './modules/CollectWgsMetrics'
include { picard_collect_wgs_metrics as picard_collect_wgs_metrics_cram } from './modules/CollectWgsMetrics'
// include { picard_collect_variant_calling_metrics_vcf } from './modules/CollectVariantCallingMetrics'
include { bcftools_stats } from './modules/bcftools'
include { count_variants } from './modules/count_variants'
include { count_aln } from './modules/count_aln'
include { metric_aln } from './modules/metric_aln'
include { metric_variants } from './modules/metric_variants'
include { compile_aln_vcf } from './modules/compile_aln_vcf'
/*
----------------------------------------------------------------------
WORKFLOW
----------------------------------------------------------------------
*/
// main
workflow {

    ref_fasta                     = file( params.reference )
    ref_fasta_idx                 = file( params.reference + ".fai" )
    autosomes_non_gap_regions     = file( params.autosomes_non_gap_regions )
    autosomes_non_gap_regions_bed = file( params.autosomes_non_gap_regions_bed )
    vbi2_ud                       = file( params.vbi2_ud )
    vbi2_bed                      = file( params.vbi2_bed )
    vbi2_mean                     = file( params.vbi2_mean )
    //ref_dbsnp = file( params.ref_dbsnp )

    inputs = new YamlSlurper().parse(file(params.inputs_list))

    Channel
        .fromList(inputs['samples'])
        .ifEmpty { ['biosample_id': params.biosample_id, 'aln': params.aln] }
        .set { samples }
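
    /*
     * Sketch of the YAML consumed via params.inputs_list (illustrative only:
     * 'samples', 'biosample_id', 'aln' and 'vcf' are the keys read below,
     * while the sample name and paths are made-up examples):
     *
     *   samples:
     *     - biosample_id: SAMPLE01
     *       aln: /path/to/SAMPLE01.cram      # optional, .bam or .cram
     *       vcf: /path/to/SAMPLE01.vcf.gz    # optional
     */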

    // Channel to get sample id mapping
    samples
        .map { it.biosample_id }
        .set { sample_ids }

    // Create channel branches for bam and cram input type
    samples
        .branch { rec ->
            def aln_file = rec.aln ? file( rec.aln ) : null
            bam: rec.biosample_id && aln_file?.extension == 'bam'
                def bam_idx = file( "${rec.aln}.bai" )
                return tuple( rec.biosample_id, aln_file, bam_idx )
            cram: rec.biosample_id && aln_file?.extension == 'cram'
                def cram_idx = file( "${rec.aln}.crai" )
                return tuple( rec.biosample_id, aln_file, cram_idx )
        }
        .set { aln_inputs }
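
    // For example, a record [biosample_id: 'SAMPLE01', aln: '/path/to/SAMPLE01.cram']
    // (hypothetical values) is emitted on aln_inputs.cram as
    // [ 'SAMPLE01', /path/to/SAMPLE01.cram, /path/to/SAMPLE01.cram.crai ]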

    samtools_stats_bam( aln_inputs.bam, [] )
    samtools_stats_cram( aln_inputs.cram, ref_fasta )

    verifybamid2_bam( aln_inputs.bam, ref_fasta, vbi2_ud, vbi2_bed, vbi2_mean )
    verifybamid2_cram( aln_inputs.cram, ref_fasta, vbi2_ud, vbi2_bed, vbi2_mean )

    picard_collect_multiple_metrics_bam( aln_inputs.bam, [], [] )
    picard_collect_multiple_metrics_cram( aln_inputs.cram, ref_fasta, ref_fasta_idx )

    picard_collect_wgs_metrics_bam( aln_inputs.bam, autosomes_non_gap_regions, ref_fasta, ref_fasta_idx )
    picard_collect_wgs_metrics_cram( aln_inputs.cram, autosomes_non_gap_regions, ref_fasta, ref_fasta_idx )

    // Create channel for vcf input
    samples
        .branch { rec ->
            def vcf_file = rec.vcf ? file( rec.vcf ) : null
            output: rec.biosample_id && vcf_file
                def vcf_idx = file( "${rec.vcf}.tbi" )
                return tuple( rec.biosample_id, vcf_file, vcf_idx )
        }
        .set { vcf_inputs }
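
    // e.g. [ 'SAMPLE01', /path/to/SAMPLE01.vcf.gz, /path/to/SAMPLE01.vcf.gz.tbi ] (hypothetical paths)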

    bcftools_stats( vcf_inputs )
    //picard_collect_variant_calling_metrics_vcf( vcf_inputs, ref_dbsnp )
    count_variants ( vcf_inputs, autosomes_non_gap_regions_bed )

    // Conditional input option to rename the metrics output file if only a vcf is given
    samples
        .branch { rec ->
            def aln_file = rec.aln ? file( rec.aln ) : null
            count: rec.biosample_id && !aln_file
                return tuple( rec.biosample_id )
        }
        .view()
        .set { variants_cond }
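
    // variants_cond holds only the biosample_id of records without an aln; joining it
    // with count_variants.out.metrics below keeps just the vcf-only samples for metric_variants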

    variants_cond
        .join( count_variants.out.metrics )
        .view()
        .set { ch_variants_metric }

    metric_variants ( ch_variants_metric )

    /*
    // channel for samplelist vcf input file processed outputs
    sample_ids
        .join( count_variants.out )
        .join( bcftools_stats.out )
        .set { vcf_qc }
    */

    // channel for samplelist input file type bam processed outputs
    sample_ids
        .join( samtools_stats_bam.out.metrics )
        .join( picard_collect_multiple_metrics_bam.out.metrics )
        .join( picard_collect_wgs_metrics_bam.out.metrics )
        .join( verifybamid2_bam.out.metrics, remainder: true )
        .set { ch_bam }

    // channel for samplelist input file type cram processed outputs
    sample_ids
        .join( samtools_stats_cram.out.metrics )
        .join( picard_collect_multiple_metrics_cram.out.metrics )
        .join( picard_collect_wgs_metrics_cram.out.metrics )
        .join( verifybamid2_cram.out.metrics, remainder: true )
        .set { ch_cram }

    // Mix the bam and cram process outputs, and replace a missing verifybamid2 result
    // (null from the outer join) with an empty list
    ch_bam.mix(ch_cram)
        .map { sample, stats, quality, wgs_coverage, freemix -> [ sample, stats, quality, wgs_coverage, freemix ?: [] ] }
        .set { aln_count_in }
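
    // e.g. a bam sample without a verifybamid2 output arrives from the join as
    // [ 'SAMPLE01', stats, quality, wgs_coverage, null ] (hypothetical) and is
    // forwarded to count_aln as [ 'SAMPLE01', stats, quality, wgs_coverage, [] ]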

    count_aln ( aln_count_in )

    // Conditional input option to rename the metrics output file if only an aln is given
    samples
        .branch { rec ->
            def vcf_file = rec.vcf ? file( rec.vcf ) : null
            count: rec.biosample_id && !vcf_file
                return tuple( rec.biosample_id )
        }
        .view()
        .set { aln_cond }

    aln_cond
        .join( count_aln.out.metrics )
        .view()
        .set { ch_aln_metric }

    metric_aln ( ch_aln_metric )

    // Conditional input option to compile metrics if both an aln and a vcf are given
    samples
        .branch { rec ->
            def aln_file = rec.aln ? file( rec.aln ) : null
            def vcf_file = rec.vcf ? file( rec.vcf ) : null
            count: rec.biosample_id && aln_file && vcf_file
                return tuple( rec.biosample_id )
        }
        //.view()
        .set { inputs_cond }

    // channel for samplelist input file type both aln and vcf processed outputs
    //sample_ids
    //    .join( inputs_cond )
    inputs_cond
        .join( count_aln.out.metrics )
        .join( count_variants.out.metrics )
        //.view()
        .set { ch_count }

    compile_aln_vcf ( ch_count )
}
/*
----------------------------------------------------------------------
COMPLETION INFO
----------------------------------------------------------------------
*/
workflow.onComplete {
    log.info "Started     : " + workflow.start
    log.info "Completed   : " + workflow.complete
    log.info "Duration    : " + workflow.duration
    log.info "Status      : " + workflow.success
    log.info "Publish dir : " + params.publish_dir
}

workflow.onError {
    log.info "Workflow execution stopped with the following message:"
    log.info "Exit status   : " + workflow.exitStatus
    log.info "Error message : " + workflow.errorMessage
    log.info "Error report  : " + (workflow.errorReport ?: '-')
}