/**
* # Parameters for nf-maccoss-trex
*
* A Nextflow pipeline for analyzing data-independent acquisition (DIA) proteomics data.
*/
params {
/** \group{Input/Output Parameters} */
// the data to be quantified
quant_spectra_dir = null // directory containing the raw data
quant_spectra_glob = '*.raw' // which files in this directory to use
// the data to be used to generate a chromatogram library (e.g. narrow window GPF data)
chromatogram_library_spectra_dir = null // directory containing the raw data
chromatogram_library_spectra_glob = '*.raw' // which files in this directory to use
spectral_library = null // can be blib, dlib, or elib
fasta = null // background FASTA
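// Example (hypothetical paths): a minimal user config, typically supplied with `-c`,
// might set the inputs like this:
//   params.quant_spectra_dir                = '/data/my_experiment/wide_window'
//   params.chromatogram_library_spectra_dir = '/data/my_experiment/narrow_window'
//   params.spectral_library                 = '/data/libraries/human.dlib'
//   params.fasta                            = '/data/fasta/uniprot_human.fasta'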
// placeholders for old Skyline parameters
skyline_document_name = null
skyline_template_file = null
skip_skyline = null
skyline_skyr_file = null
// Optional PDC study settings
pdc.client_args = ''
pdc.study_id = null
pdc.n_raw_files = null
pdc.metadata_tsv = null
pdc.gene_level_data = null
// The final Skyline document will be named using this name. For example,
// if skyline.document_name = 'human_dia' then the final Skyline document
// will be named "human_dia.sky.zip". When importing into PanoramaWeb, this
// name will appear in the table of imported Skyline documents.
skyline.document_name = 'final'
// the search engine to use: must be either 'encyclopedia' or 'diann'
search_engine = 'encyclopedia'
// default parameters for Encyclopedia searches, can be overridden by users
// todo: follow up on these options: -quantifyAcrossSamples true -scoringBreadthType window
encyclopedia.quant.params = '-enableAdvancedOptions -v2scoring'
encyclopedia.chromatogram.params = '-enableAdvancedOptions -v2scoring'
// default parameters for DIA-NN
diann.params = '--unimod4 --qvalue 0.01 --cut \'K*,R*,!*P\' --reanalyse --smart-profiling'
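// Example (illustrative): to search with DIA-NN instead of EncyclopeDIA, a user config
// (passed with `-c`) could override the engine and, if needed, its command-line options:
//   params.search_engine = 'diann'
//   params.diann.params  = '--unimod4 --qvalue 0.01 --reanalyse --smart-profiling'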
// Whether or not to save the output from EncyclopeDIA runs on individual raw/mzML
// files (e.g. .dia or .features.txt files) to the results directory.
// Setting this to false can save considerable disk space when running on AWS Batch.
// The generated chromatogram library (.elib) is always saved, regardless of this setting.
encyclopedia.save_output = true
// optional user-supplied parameters
email = null // email to notify of workflow outcome; leave null to send no email
skyline.template_file = null // the Skyline template; if null, default_skyline_template_file is used
panorama_upload = false // whether or not to upload the results to PanoramaWeb
msconvert.do_demultiplex = true // whether or not to demultiplex with msconvert
msconvert.do_simasspectra = true // whether or not to set simAsSpectra with msconvert
msconvert.mz_shift_ppm = null // shift all m/z values by n ppm
// If set to true, only run msconvert and stop. Resulting mzML files will be saved to the
// "msconvert" subdirectory of the results directory.
msconvert_only = false
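// Example (illustrative): for data acquired without overlapping/staggered windows,
// demultiplexing can be disabled; to only convert raw files to mzML and stop, enable
// msconvert_only:
//   params.msconvert.do_demultiplex = false
//   params.msconvert_only           = true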
// Parameters related to PanoramaWeb
panorama.domain = 'https://panoramaweb.org'
panorama.public.key = '7d503a4147133c448c6eaf83bc9b8bc22ace4b7f6d36ca61c9d1ca836c510d10'
panorama.upload = false // Whether or not to upload to PanoramaWeb
panorama.upload_url = null // The webdav URL of a folder to hold all uploaded files
panorama.import_skyline = false // whether or not to import the Skyline into Panorama's internal database
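// Example (hypothetical URL): to upload results to a PanoramaWeb folder, a user config
// would typically enable the upload and point upload_url at the WebDAV URL of the
// destination folder, e.g.:
//   params.panorama.upload     = true
//   params.panorama.upload_url = 'https://panoramaweb.org/_webdav/MyProject/%40files/'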
// replicate metadata
replicate_metadata = null
// Skip creating a Skyline document
skyline.skip = false
// Set to import a Skyline .skyr file and run any included reports at the end of the workflow
skyline.skyr_file = null
// Minimize Skyline document?
skyline.minimize = false
skyline.group_by_gene = false
skyline.protein_parsimony = false
// Whether or not to use hardlinks with Skyline
skyline.use_hardlinks = false
// Skip QC report generation
qc_report.skip = true
// Normalization method to use for plots in QC report
qc_report.normalization_method = 'median'
// List of protein names to plot retention times for
qc_report.standard_proteins = null
// List of metadata variables to color PCA plots by
qc_report.color_vars = null
// Export matrices of normalized precursor and protein quantities?
qc_report.export_tables = false
// Skyline report templates
qc_report.precursor_report_template = 'https://raw.githubusercontent.com/ajmaurais/DIA_QC_report/master/resources/precursor_quality.skyr'
qc_report.replicate_report_template = 'https://raw.githubusercontent.com/ajmaurais/DIA_QC_report/master/resources/replicate_quality.skyr'
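// Example (assumed values and list syntax): enabling the QC report with PCA plots
// colored by two hypothetical metadata variables and retention-time plots for one
// hypothetical standard protein might look like:
//   params.qc_report.skip              = false
//   params.qc_report.standard_proteins = ['sp|P00924|ENO1_YEAST']
//   params.qc_report.color_vars        = ['batch', 'condition']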
// general workflow params, can be changed
result_dir = 'results/nf-skyline-dia-ms' /** \type{str} Where results will be saved. */
report_dir = 'reports/nf-skyline-dia-ms' /** \type{str} Where workflow reports will be saved. */
// used if no user-supplied Skyline template file is given; suitable for EncyclopeDIA and DIA-NN
default_skyline_template_file = 'https://github.com/mriffle/nf-skyline-dia-ms/raw/main/resources/template.sky.zip'
// AWS Batch params
aws.region = 'us-west-2'
aws.batch.cliPath = '/usr/local/aws-cli/v2/current/bin/aws'
aws.batch.logsGroup = '/batch/tei-nextflow-batch'
aws.batch.maxConnections = 20
aws.batch.connectionTimeout = 10000
aws.batch.uploadStorageClass = 'INTELLIGENT_TIERING'
aws.batch.storageEncryption = 'AES256'
aws.batch.retryMode = 'standard'
}
plugins {
id 'nf-amazon'
}
docker {
enabled = true
}
aws {
batch {
// NOTE: this setting is only required if the AWS CLI tool is installed in a custom AMI
cliPath = params.aws.batch.cliPath
logsGroup = params.aws.batch.logsGroup
maxConnections = params.aws.batch.maxConnections
connectionTimeout = params.aws.batch.connectionTimeout
uploadStorageClass = params.aws.batch.uploadStorageClass
storageEncryption = params.aws.batch.storageEncryption
retryMode = params.aws.batch.retryMode
}
region = params.aws.region
}
// Execution Profiles
profiles {
/*
* Params for running the pipeline on the local computer (e.g. your laptop).
* These can be overridden in the local config file.
*/
standard {
process.executor = 'local'
// limit nextflow to running 1 task at a time
executor.queueSize = 1
params.max_memory = '12.GB'
params.max_cpus = 8
params.max_time = '240.h'
// where to cache mzml files after running msconvert
params.mzml_cache_directory = '/data/mass_spec/nextflow/nf-skyline-dia-ms/mzml_cache'
params.panorama_cache_directory = '/data/mass_spec/nextflow/panorama/raw_cache'
}
aws {
process.executor = 'awsbatch'
process.queue = 'nextflow_basic_ec2'
// Params for running the pipeline on AWS Batch.
// These can be overridden in the local config file.
// Maximum resources allowed by your AWS Batch compute environment:
params.max_memory = '250.GB'
params.max_cpus = 32
params.max_time = '240.h'
// where to cache mzml files after running msconvert
params.mzml_cache_directory = 's3://mc-tei-rex-nextflow-dda/dia/mzml_cache'
params.panorama_cache_directory = 's3://mc-tei-rex-nextflow-dda/panorama_cache'
}
slurm {
process.executor = 'slurm'
params.max_memory = '12.GB'
params.max_cpus = 8
params.max_time = '240.h'
// where to cache mzml files after running msconvert
params.mzml_cache_directory = '/data/mass_spec/nextflow/nf-skyline-dia-ms/mzml_cache'
params.panorama_cache_directory = '/data/mass_spec/nextflow/panorama/raw_cache'
}
}
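// Example (illustrative; `pipeline.config` is a hypothetical file name): a profile is
// selected at launch time with `-profile`, and user overrides are supplied with `-c`:
//   nextflow run mriffle/nf-skyline-dia-ms -profile standard -c pipeline.config
//   nextflow run mriffle/nf-skyline-dia-ms -profile aws -c pipeline.config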
// Manifest
manifest {
name = 'nf-skyline-dia-ms'
author = 'Michael Riffle'
homePage = 'https://github.com/mriffle/nf-skyline-dia-ms'
description = 'DIA workflows for TEI-REX project'
mainScript = 'main.nf'
nextflowVersion = '!>=21.10.3'
}
// Capture exit codes from upstream processes when piping
process.shell = ['/bin/bash', '-euo', 'pipefail']
def trace_timestamp = new java.util.Date().format( 'yyyy-MM-dd_HH-mm-ss')
timeline {
enabled = true
file = "${params.report_dir}/execution_timeline_${trace_timestamp}.html"
}
report {
enabled = true
file = "${params.report_dir}/execution_report_${trace_timestamp}.html"
}
trace {
enabled = true
file = "${params.report_dir}/execution_trace_${trace_timestamp}.txt"
}
dag {
enabled = false
file = "${params.report_dir}/pipeline_dag_${trace_timestamp}.html"
}
// Load base.config by default for all pipelines
includeConfig 'conf/base.config'
// Load the images to use for all processes
includeConfig 'container_images.config'
// Load the output file directories
includeConfig 'conf/output_directories.config'
// Function to ensure that resource requirements don't go beyond
// a maximum limit. Copied from the nf-core template.
def check_max(obj, type) {
if (type == 'memory') {
try {
if (obj.compareTo(params.max_memory as nextflow.util.MemoryUnit) == 1)
return params.max_memory as nextflow.util.MemoryUnit
else
return obj
} catch (all) {
println " ### ERROR ### Max memory '${params.max_memory}' is not valid! Using default value: $obj"
return obj
}
} else if (type == 'time') {
try {
if (obj.compareTo(params.max_time as nextflow.util.Duration) == 1)
return params.max_time as nextflow.util.Duration
else
return obj
} catch (all) {
println " ### ERROR ### Max time '${params.max_time}' is not valid! Using default value: $obj"
return obj
}
} else if (type == 'cpus') {
try {
return Math.min( obj, params.max_cpus as int )
} catch (all) {
println " ### ERROR ### Max cpus '${params.max_cpus}' is not valid! Using default value: $obj"
return obj
}
}
}
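// Usage sketch (assumed, following the nf-core convention this function was copied from):
// check_max() is typically called from per-process resource definitions, e.g. in
// conf/base.config:
//   process {
//       cpus   = { check_max( 4                  , 'cpus'   ) }
//       memory = { check_max( 8.GB * task.attempt, 'memory' ) }
//       time   = { check_max( 4.h  * task.attempt, 'time'   ) }
//   }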