Commit
Merge pull request #507 from jfy133/eva-mag-fix
Re-add cpu/memory specifications to MAG @ EVA
alexhbnr authored Jun 21, 2023
2 parents 53c8f51 + 90e8d44 commit 930c83d
Showing 1 changed file with 54 additions and 8 deletions.
62 changes: 54 additions & 8 deletions conf/pipeline/mag/eva.config
@@ -20,21 +20,30 @@ process {
}

withLabel:process_single {
cpus = { check_max( 1 , 'cpus' ) }
memory = { check_max( 6.GB * task.attempt, 'memory' ) }
time = 365.d
}
withLabel:process_low {
cpus = { check_max( 2 * task.attempt, 'cpus' ) }
memory = { check_max( 12.GB * task.attempt, 'memory' ) }
time = 365.d
}
withLabel:process_medium {
cpus = { check_max( 6 * task.attempt, 'cpus' ) }
memory = { check_max( 36.GB * task.attempt, 'memory' ) }
time = 365.d
}
withLabel:process_high {
cpus = { check_max( 12 * task.attempt, 'cpus' ) }
memory = { check_max( 72.GB * task.attempt, 'memory' ) }
time = 365.d
}
withLabel:process_long {
time = 365.d
}
withLabel:process_high_memory {
memory = { check_max( 200.GB * task.attempt, 'memory' ) }
time = 365.d
}
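Every request above is wrapped in check_max, which caps it at the pipeline-wide maxima (params.max_memory, params.max_cpus, params.max_time) so a retry can never ask for more than the cluster allows. The helper is not defined in this file; the following is only a sketch of the memory branch as it typically appears in nf-core base configs, not the EVA-specific code.

def check_max(obj, type) {
    // Sketch only: nf-core pipelines usually ship this helper in their base.config.
    if (type == 'memory') {
        try {
            if (obj.compareTo(params.max_memory as nextflow.util.MemoryUnit) == 1)
                return params.max_memory as nextflow.util.MemoryUnit   // clamp to the configured maximum
            else
                return obj
        } catch (all) {
            println "   ### ERROR ###   Max memory '${params.max_memory}' is not valid! Using default value: $obj"
            return obj
        }
    }
    return obj   // the 'cpus' and 'time' branches are handled analogously in the real helper
}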
withLabel:error_ignore {
@@ -50,82 +59,119 @@ process {
}
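Note that the per-process blocks below take precedence over the label-based defaults above: in Nextflow configuration, a withName selector outranks a withLabel selector for the same process. A minimal illustration, where PROCESS_FOO is a hypothetical process name used only for this example:

process {
    withLabel:process_low { cpus = 2 }     // generic default attached via the process label
    withName: PROCESS_FOO { cpus = 10 }    // the more specific selector wins for PROCESS_FOO
}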

withName: BOWTIE2_HOST_REMOVAL_BUILD {
cpus = { check_max (10 * task.attempt, 'cpus' ) }
memory = { check_max (20.GB * task.attempt, 'memory' ) }
time = 365.d
}
withName: BOWTIE2_HOST_REMOVAL_ALIGN {
cpus = { check_max (10 * task.attempt, 'cpus' ) }
memory = { check_max (10.GB * task.attempt, 'memory' ) }
time = 365.d
}
withName: BOWTIE2_PHIX_REMOVAL_ALIGN {
cpus = { check_max (4 * task.attempt, 'cpus' ) }
memory = { check_max (8.GB * task.attempt, 'memory' ) }
time = 365.d
}
withName: PORECHOP {
cpus = { check_max (4 * task.attempt, 'cpus' ) }
memory = { check_max (30.GB * task.attempt, 'memory' ) }
time = 365.d
}
withName: NANOLYSE {
cpus = { check_max (2 * task.attempt, 'cpus' ) }
memory = { check_max (10.GB * task.attempt, 'memory' ) }
time = 365.d
}
//filtlong: exponential increase of memory and time with attempts
withName: FILTLONG {
cpus = { check_max (8 * task.attempt , 'cpus' ) }
memory = { check_max (64.GB * (2**(task.attempt-1)), 'memory' ) }
time = 365.d
}
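As the comment notes, memory doubles on every retry rather than growing linearly. A standalone Groovy sketch (illustrative only, with plain integers outside Nextflow) of how 64.GB * (2**(task.attempt-1)) scales:

// attempt 1 -> 64 GB, attempt 2 -> 128 GB, attempt 3 -> 256 GB
(1..3).each { attempt ->
    println "attempt ${attempt}: ${64 * (2**(attempt - 1))} GB"
}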
withName: CENTRIFUGE {
cpus = { check_max (8 * task.attempt, 'cpus' ) }
memory = { check_max (40.GB * task.attempt, 'memory' ) }
time = 365.d
}
withName: KRAKEN2 {
cpus = { check_max (8 * task.attempt, 'cpus' ) }
memory = { check_max (40.GB * task.attempt, 'memory' ) }
time = 365.d
}
withName: KRONA {
cpus = { check_max (8 * task.attempt, 'cpus' ) }
memory = { check_max (20.GB * task.attempt, 'memory' ) }
time = 365.d
}
withName: CAT_DB_GENERATE {
memory = { check_max (200.GB * task.attempt, 'memory' ) }
time = 365.d
}
withName: CAT {
cpus = { check_max (8 * task.attempt, 'cpus' ) }
memory = { check_max (40.GB * task.attempt, 'memory' ) }
time = 365.d
}
withName: GTDBTK_CLASSIFYWF {
cpus = { check_max (10 * task.attempt, 'cpus' ) }
memory = { check_max (128.GB * task.attempt, 'memory' ) }
time = 365.d
}
//MEGAHIT returns exit code 250 when running out of memory
withName: MEGAHIT {
cpus = { check_megahit_cpus (8, task.attempt ) }
memory = { check_max (40.GB * task.attempt, 'memory' ) }
time = 365.d
errorStrategy = { task.exitStatus in ((130..145) + 104 + 250) ? 'retry' : 'finish' }
}
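The retry list is assembled with Groovy collection arithmetic: adding integers to a range flattens everything into one list, so the scheduler-kill codes plus MEGAHIT's out-of-memory code 250 trigger a retry while anything else finishes gracefully. A standalone check, for illustration only:

def retryCodes = (130..145) + 104 + 250   // flattens to [130, 131, ..., 145, 104, 250]
assert 250 in retryCodes                  // out-of-memory exit status is retried
assert !(1 in retryCodes)                 // other failures fall through to 'finish'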
//SPAdes returns error(1) if it runs out of memory (and for other reasons as well...)!
//exponential increase of memory and time with attempts, keep number of threads to enable reproducibility
withName: SPADES {
- time = 365.d
- errorStrategy = { task.exitStatus in ((130..145) + 104 + 21 + 1) ? 'retry' : 'finish' }
cpus = { check_spades_cpus (10, task.attempt) }
memory = { check_max (64.GB * (2**(task.attempt-1)), 'memory' ) }
time = 365.d
errorStrategy = { task.exitStatus in [143,137,21,1] ? 'retry' : 'finish' }
maxRetries = 5
}
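check_spades_cpus (like check_megahit_cpus and check_spadeshybrid_cpus) is a helper local to this config whose definition lies outside the lines shown here; per the comment above, its purpose is to keep the thread count stable across retries so assemblies remain reproducible. A purely hypothetical sketch of such a helper, which may differ from the real implementation:

// Hypothetical sketch only; the actual helper in eva.config may differ.
def check_spades_cpus(requested, attempt) {
    // do not scale with task.attempt: a changing thread count can change SPAdes results
    return check_max(requested, 'cpus')
}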
withName: SPADESHYBRID {
- time = 365.d
- errorStrategy = { task.exitStatus in ((130..145) + 104 + 21 + 1) ? 'retry' : 'finish' }
cpus = { check_spadeshybrid_cpus (10, task.attempt) }
memory = { check_max (64.GB * (2**(task.attempt-1)), 'memory' ) }
time = { check_max (24.h * (2**(task.attempt-1)), 'time' ) }
errorStrategy = { task.exitStatus in [143,137,21,1] ? 'retry' : 'finish' }
maxRetries = 5
}
//returns exit code 247 when running out of memory
withName: BOWTIE2_ASSEMBLY_ALIGN {
- time = 365.d
- errorStrategy = { task.exitStatus in ((130..145) + 104 + 247) ? 'retry' : 'finish' }
cpus = { check_max (2 * task.attempt, 'cpus' ) }
memory = { check_max (8.GB * task.attempt, 'memory' ) }
time = { check_max (24.h * (2**(task.attempt-1)), 'time' ) }
errorStrategy = { task.exitStatus in [143,137,104,134,139,247] ? 'retry' : 'finish' }
}
withName: METABAT2_METABAT2 {
- time = 365.d
cpus = { check_max (8 * task.attempt, 'cpus' ) }
memory = { check_max (20.GB * task.attempt, 'memory' ) }
time = { check_max (24.h * (2**(task.attempt-1)), 'time' ) }
}
withName: MAG_DEPTHS {
memory = { check_max (16.GB * task.attempt, 'memory' ) }
time = { check_max (24.h * (2**(task.attempt-1)), 'time' ) }
}
withName: BUSCO {
cpus = { check_max (8 * task.attempt, 'cpus' ) }
memory = { check_max (20.GB * task.attempt, 'memory' ) }
time = { check_max (24.h * (2**(task.attempt-1)), 'time' ) }
}

withName: MAXBIN2 {
// often fails when insufficient information, so we allow it to gracefully fail without failing the pipeline
errorStrategy = { task.exitStatus in [ 1, 255 ] ? 'ignore' : 'retry' }
time = { check_max (24.h * (2**(task.attempt-1)), 'time' ) }
}

withName: DASTOOL_DASTOOL {
// if SCGs not found, bins cannot be assigned and DAS_tool will die with exit status 1
- errorStrategy = { task.exitStatus in ((130..145) + 104 + 21 + 1) ? 'retry' : task.exitStatus == 1 ? 'ignore' : 'finish' }
errorStrategy = { task.exitStatus in [143,137,104,134,139] ? 'retry' : task.exitStatus == 1 ? 'ignore' : 'finish' }
time = { check_max (24.h * (2**(task.attempt-1)), 'time' ) }
}
}
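The DAS Tool errorStrategy above chains two ternaries, which Groovy resolves right to left: scheduler kills are retried, the documented exit status 1 (no single-copy genes found) is ignored so the sample is simply skipped, and any other failure finishes gracefully. A standalone check, for illustration only:

def strategy = { status -> status in [143,137,104,134,139] ? 'retry' : status == 1 ? 'ignore' : 'finish' }
assert strategy(137) == 'retry'    // killed by the scheduler, e.g. out of memory
assert strategy(1)   == 'ignore'   // no SCGs: drop this binning result, keep the pipeline running
assert strategy(2)   == 'finish'   // any other error stops gracefully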
