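// Shared utility configs: CSV/BAM parsing, common methods, schema validation, and retry logic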
includeConfig "${projectDir}/external/pipeline-Nextflow-config/config/csv/csv_parser.config"
includeConfig "${projectDir}/external/pipeline-Nextflow-config/config/bam/bam_parser.config"
includeConfig "${projectDir}/external/pipeline-Nextflow-config/config/methods/common_methods.config"
includeConfig "${projectDir}/external/pipeline-Nextflow-config/config/schema/schema.config"
includeConfig "${projectDir}/external/pipeline-Nextflow-config/config/retry/retry.config"
methods {
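    // Build the output directory hierarchy:
    //   <output_dir>/<pipeline>-<version>/<patient_id>/<GATK-DOCKER-IMAGE-TAG>
    // plus a timestamped log directory under the same root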
set_output_dirs = {
def output_dir_root = "${params.output_dir}/${manifest.name}-${manifest.version}/${params.patient_id}"
params.output_dir_base = "${output_dir_root}/${params.docker_image_gatk.split("/")[1].replace(':', '-').toUpperCase()}"
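        // UTC timestamp (ISO-8601 basic format) gives each run a unique log directory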
def tz = TimeZone.getTimeZone("UTC")
def date = new Date().format("yyyyMMdd'T'HHmmss'Z'", tz)
params.log_output_dir = "${output_dir_root}/log-${manifest.name}-${manifest.version}-${date}"
}
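    // Enable Nextflow trace, timeline, and report logs under the log directory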
set_pipeline_logs = {
trace.enabled = true
trace.file = "${params.log_output_dir}/nextflow-log/trace.txt"
timeline.enabled = true
timeline.file = "${params.log_output_dir}/nextflow-log/timeline.html"
report.enabled = true
report.file = "${params.log_output_dir}/nextflow-log/report.html"
}
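    // Toggle caching of intermediate pipeline steps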
set_process = {
process.cache = params.cache_intermediate_pipeline_steps
}
    detect_mode = {
        // Detect whether the job is for a targeted sample (an intervals file was provided)
        params.is_targeted = (params.intervals) ? true : false
        // Detect whether both normal and tumor BAMs were provided (normal-tumor paired mode)
        params.is_NT_paired = (params.input.BAM.containsKey('normal') && params.input.BAM.containsKey('tumor'))
    }
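    // Extract sample IDs from the SM tags of each input BAM's read groups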
set_ids_from_bams = {
params.samples_to_process = [] as Set
params.input.BAM.each { k, v ->
v.each { bam_path ->
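                // Parse the BAM header and collect the unique SM (sample) tags across read groups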
def bam_header = bam_parser.parse_bam_header(bam_path)
def sm_tags = bam_header['read_group'].collect{ it['SM'] }.unique()
                if (sm_tags.size() != 1) {
                    throw new Exception("${bam_path} must contain exactly one sample SM tag! Please run the pipeline with single-sample BAMs.")
                }
params.samples_to_process.add(['id': sm_tags[0], 'path': bam_path, 'sample_type': k])
}
}
}
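    // Apply any user-defined multipliers from params.base_resource_update
    // to the base resource allocations of the listed processes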
modify_base_allocations = {
if (!(params.containsKey('base_resource_update') && params.base_resource_update)) {
return
}
params.base_resource_update.each { resource, updates ->
updates.each { processes, multiplier ->
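                // Accept either a single process name or a list of process names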
def processes_to_update = (custom_schema_types.is_string(processes)) ? [processes] : processes
methods.update_base_resource_allocation(resource, multiplier, processes_to_update)
}
}
}
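    // Top-level entry point: validate the parameter schema, then configure
    // resources, sample IDs, output paths, logging, caching, and retry behavior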
setup = {
methods.set_env()
schema.load_custom_types("${projectDir}/config/custom_schema_types.config")
schema.validate()
methods.set_resources_allocation()
methods.modify_base_allocations()
methods.set_ids_from_bams()
methods.set_output_dirs()
methods.set_pipeline_logs()
methods.set_process()
methods.detect_mode()
retry.setup_retry()
methods.setup_docker_cpus()
}
}