Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 2 additions & 1 deletion CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,8 @@ and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0.
* [#176](https://github.com/nf-core/eager/pull/176) - Increase runtime for DamageProfiler on [large reference genomes](https://github.com/nf-core/eager/issues/173)
* [#172](https://github.com/nf-core/eager/pull/172) - DamageProfiler errors [won't crash entire pipeline anymore](https://github.com/nf-core/eager/issues/171)
* [#174](https://github.com/nf-core/eager/pull/174) - Publish DeDup files [properly](https://github.com/nf-core/eager/issues/183)
* TBF - Fix reference [issues](https://github.com/nf-core/eager/issues/150)
* [#196](https://github.com/nf-core/eager/pull/196) - Fix reference [issues](https://github.com/nf-core/eager/issues/150)
* [#196](https://github.com/nf-core/eager/pull/196) - Fix issues with PE data being mapped incompletely

### `Dependencies`

Expand Down
27 changes: 11 additions & 16 deletions main.nf
Original file line number Diff line number Diff line change
Expand Up @@ -245,9 +245,10 @@ if("${params.fasta}".endsWith(".gz")){
file zipped_fasta

output:
file "*.fasta" into fasta_for_indexing
file "*.{fa,fn,fna,fasta}" into fasta_for_indexing

script:
rm_zip = zipped_fasta - '.gz'
"""
pigz -f -d -p ${task.cpus} $zipped_fasta
"""
Expand Down Expand Up @@ -486,13 +487,12 @@ process makeFastaIndex {
file where_are_my_files

output:
file "${base}.fasta.fai" into ch_fasta_faidx_index
file "*.fai" into ch_fasta_faidx_index
file "where_are_my_files.txt"

script:
base = "${fasta.baseName}"
"""
samtools faidx "${base}.fasta"
samtools faidx $fasta
"""
}

Expand All @@ -516,16 +516,12 @@ process makeSeqDict {
file where_are_my_files

output:
file "seq_dict/*.dict" into ch_seq_dict
file "*.dict" into ch_seq_dict
file "where_are_my_files.txt"

script:
base = "${fasta.baseName}.fasta"
"""
mkdir -p seq_dict
mv $fasta "seq_dict/${base}"
cd seq_dict
picard -Xmx${task.memory.toMega()}M -Xms${task.memory.toMega()}M CreateSequenceDictionary R=$base O="${fasta.baseName}.dict"
picard -Xmx${task.memory.toMega()}M -Xms${task.memory.toMega()}M CreateSequenceDictionary R=$fasta O="${fasta.baseName}.dict"
"""
}

Expand Down Expand Up @@ -679,7 +675,7 @@ process adapter_removal {
* STEP 2.1 - FastQC after clipping/merging (if applied!)
*/
process fastqc_after_clipping {
tag "${prefix}"
tag "${name}"
publishDir "${params.outdir}/FastQC/after_clipping", mode: 'copy',
saveAs: {filename -> filename.indexOf(".zip") > 0 ? "zips/$filename" : "$filename"}

Expand All @@ -692,7 +688,6 @@ process fastqc_after_clipping {
file "*_fastqc.{zip,html}" optional true into ch_fastqc_after_clipping

script:
prefix = reads[0].toString().tokenize('.')[0]
"""
fastqc -q $reads
"""
Expand All @@ -703,7 +698,7 @@ Step 3: Mapping with BWA, SAM to BAM, Sort BAM
*/

process bwa {
tag "$prefix"
tag "${name}"
publishDir "${params.outdir}/mapping/bwa", mode: 'copy'

when: !params.circularmapper && !params.bwamem
Expand All @@ -723,8 +718,8 @@ process bwa {
fasta = "${index}/${bwa_base}"

//PE data without merging, PE data without any AR applied
if (!params.singleEnd && (params.skip_collapse || params.skip_adapterremoval)){
prefix = reads[0].toString().tokenize('.')[0]
if (!params.singleEnd && (params.skip_collapse || params.skip_adapterremoval || params.skip_trim)){
prefix = "${reads[0].baseName}"
"""
bwa aln -t ${task.cpus} $fasta ${reads[0]} -n ${params.bwaalnn} -l ${params.bwaalnl} -k ${params.bwaalnk} -f ${prefix}.r1.sai
bwa aln -t ${task.cpus} $fasta ${reads[1]} -n ${params.bwaalnn} -l ${params.bwaalnl} -k ${params.bwaalnk} -f ${prefix}.r2.sai
Expand All @@ -733,7 +728,7 @@ process bwa {
"""
} else {
//PE collapsed, or SE data
prefix = reads[0].toString().tokenize('.')[0]
prefix = "${reads.baseName}"
"""
bwa aln -t ${task.cpus} $fasta $reads -n ${params.bwaalnn} -l ${params.bwaalnl} -k ${params.bwaalnk} -f ${prefix}.sai
bwa samse -r "@RG\\tID:ILLUMINA-${prefix}\\tSM:${prefix}\\tPL:illumina" $fasta ${prefix}.sai $reads | samtools sort -@ ${task.cpus} -O bam - > "${prefix}".sorted.bam
Expand Down