diff --git a/conf/modules.config b/conf/modules.config index 069ae1256..0ba8d063c 100644 --- a/conf/modules.config +++ b/conf/modules.config @@ -1012,7 +1012,7 @@ process { } withName: MALTEXTRACT { - ext.args = [ + ext.args = { [ "-f ${params.metagenomics_maltextract_filter}", "-a ${params.metagenomics_maltextract_toppercent}", "--minPI ${params.metagenomics_maltextract_minpercentidentity}", @@ -1022,8 +1022,8 @@ process { params.metagenomics_maltextract_matches ? "--matches" : "", params.metagenomics_maltextract_megansummary ? "--meganSummary" : "", params.metagenomics_maltextract_usetopalignment ? "--useTopAlignment" : "", - { meta.strandedness } == "single" ? '--singleStranded' : '', - ].join(' ').trim() + meta.strandedness == "single" ? '--singleStranded' : '', + ].join(' ').trim() } publishDir = [ path: { "${params.outdir}/metagenomics/postprocessing/maltextract/" }, mode: params.publish_dir_mode, @@ -1391,7 +1391,7 @@ process { } withName: MALTEXTRACT { - ext.args = [ + ext.args = { [ "-f ${params.metagenomics_maltextract_filter}", "-a ${params.metagenomics_maltextract_toppercent}", "--minPI ${params.metagenomics_maltextract_minpercentidentity}", @@ -1401,8 +1401,8 @@ process { params.metagenomics_maltextract_matches ? "--matches" : "", params.metagenomics_maltextract_megansummary ? "--meganSummary" : "", params.metagenomics_maltextract_usetopalignment ? "--useTopAlignment" : "", - { meta.strandedness } == "single" ? '--singleStranded' : '', - ].join(' ').trim() + meta.strandedness == "single" ? 
'--singleStranded' : '', + ].join(' ').trim() } publishDir = [ [ path: { "${params.outdir}/metagenomics/maltextract/stats/" }, diff --git a/modules/nf-core/bwa/index/main.nf b/modules/nf-core/bwa/index/main.nf index 2e48b6caa..2aad5ac01 100644 --- a/modules/nf-core/bwa/index/main.nf +++ b/modules/nf-core/bwa/index/main.nf @@ -11,7 +11,7 @@ process BWA_INDEX { tuple val(meta), path(fasta) output: - tuple val(meta), path(bwa) , emit: index + tuple val(meta), path('bwa') , emit: index path "versions.yml" , emit: versions when: diff --git a/modules/nf-core/samtools/mpileup/main.nf b/modules/nf-core/samtools/mpileup/main.nf index 3e4cc409b..5b4365309 100644 --- a/modules/nf-core/samtools/mpileup/main.nf +++ b/modules/nf-core/samtools/mpileup/main.nf @@ -20,13 +20,13 @@ process SAMTOOLS_MPILEUP { script: def args = task.ext.args ?: '' def prefix = task.ext.prefix ?: "${meta.id}" - def intervals = intervals ? "-l ${intervals}" : "" + def intervals_opt = intervals ? "-l ${intervals}" : "" """ samtools mpileup \\ --fasta-ref $fasta \\ --output ${prefix}.mpileup \\ $args \\ - $intervals \\ + $intervals_opt \\ $input bgzip ${prefix}.mpileup cat <<-END_VERSIONS > versions.yml diff --git a/modules/nf-core/samtools/view/main.nf b/modules/nf-core/samtools/view/main.nf index 0b5a29121..54e68b9c1 100644 --- a/modules/nf-core/samtools/view/main.nf +++ b/modules/nf-core/samtools/view/main.nf @@ -61,11 +61,11 @@ process SAMTOOLS_VIEW { input.getExtension() if ("$input" == "${prefix}.${file_type}") error "Input and output names are the same, use \"task.ext.prefix\" to disambiguate!" - def index = args.contains("--write-index") ? "touch ${prefix}.csi" : "" + def index_cmd = args.contains("--write-index") ? 
"touch ${prefix}.csi" : "" """ touch ${prefix}.${file_type} - ${index} + ${index_cmd} cat <<-END_VERSIONS > versions.yml "${task.process}": diff --git a/subworkflows/local/calculate_damage.nf b/subworkflows/local/calculate_damage.nf index a395e8abc..cf26e7766 100644 --- a/subworkflows/local/calculate_damage.nf +++ b/subworkflows/local/calculate_damage.nf @@ -34,10 +34,10 @@ workflow CALCULATE_DAMAGE { ch_refs ) .multiMap{ - ignore_me, meta, bam, bai, meta2, fasta, fasta_fai -> + ignore_me, meta, bam, bai, meta2, fasta_, fasta_fai_ -> bam: [ meta, bam ] - fasta: fasta - fasta_fai: fasta_fai + fasta: fasta_ + fasta_fai: fasta_fai_ } // Calculate damage if ( params.damagecalculation_tool == 'damageprofiler' ) { diff --git a/subworkflows/local/circularmapper.nf b/subworkflows/local/circularmapper.nf index 46874f987..342efd2fc 100644 --- a/subworkflows/local/circularmapper.nf +++ b/subworkflows/local/circularmapper.nf @@ -41,7 +41,7 @@ workflow CIRCULARMAPPER { .map{ // create meta consistent with rest of MAP workflow meta, bam -> - new_meta = meta + [ reference: meta.id_index ] + def new_meta = meta + [ reference: meta.id_index ] [ new_meta, bam ] } .map { diff --git a/subworkflows/local/deduplicate.nf b/subworkflows/local/deduplicate.nf index c2c94a88d..2d93c49e5 100644 --- a/subworkflows/local/deduplicate.nf +++ b/subworkflows/local/deduplicate.nf @@ -71,10 +71,10 @@ workflow DEDUPLICATE { ch_refs ) .multiMap{ - ignore_me, meta, bam, bai, meta2, fasta, fasta_fai -> + ignore_me, meta, bam, bai, meta2, fasta_, fasta_fai_ -> bam: [ meta, bam ] - fasta: [ meta2, fasta ] - fasta_fai: [ meta2, fasta_fai ] + fasta: [ meta2, fasta_ ] + fasta_fai: [ meta2, fasta_fai_ ] } // Dedup each bam @@ -103,7 +103,7 @@ workflow DEDUPLICATE { ch_input_for_samtools_merge = ch_dedupped_region_bam .map { meta, bam -> - meta2 = meta.clone().findAll{ it.key != 'genomic_region' } + def meta2 = meta.clone().findAll{ it.key != 'genomic_region' } [ meta2, bam ] } .groupTuple() @@ -117,10 +117,10 
@@ workflow DEDUPLICATE { ) .multiMap{ // bam here is a list of bams - ignore_me, meta, bam, meta2, fasta, fasta_fai -> + ignore_me, meta, bam, meta2, fasta_, fasta_fai_ -> bam: [ meta, bam ] - fasta: [ meta2, fasta ] - fasta_fai: [ meta2, fasta_fai ] + fasta: [ meta2, fasta_ ] + fasta_fai: [ meta2, fasta_fai_ ] } // Merge the bams for each region into one bam diff --git a/subworkflows/local/genotype.nf b/subworkflows/local/genotype.nf index a2b5f7525..3abffdc7f 100644 --- a/subworkflows/local/genotype.nf +++ b/subworkflows/local/genotype.nf @@ -372,7 +372,7 @@ workflow GENOTYPE { def strandedness = metas.collect { meta -> meta.strandedness } def single_ends = metas.collect { meta -> meta.single_end } def reference = combo_meta.reference - new_meta = [ sample_id: ids, strandedness: strandedness, single_end: single_ends, reference: reference ] + def new_meta = [ sample_id: ids, strandedness: strandedness, single_end: single_ends, reference: reference ] [ combo_meta, new_meta, bams, bais ] // Drop bais } // Collect all IDs into a list in meta.sample_id. diff --git a/subworkflows/local/manipulate_damage.nf b/subworkflows/local/manipulate_damage.nf index bc8d96cba..ee103bea8 100644 --- a/subworkflows/local/manipulate_damage.nf +++ b/subworkflows/local/manipulate_damage.nf @@ -154,8 +154,8 @@ workflow MANIPULATE_DAMAGE { ch_trimbam_input = ch_to_trim .map { meta, bam -> - trim_left = meta.strandedness == 'single' ? ( meta.damage_treatment == 'none' ? params.damage_manipulation_bamutils_trim_single_stranded_none_udg_left : meta.damage_treatment == 'half' ? params.damage_manipulation_bamutils_trim_single_stranded_half_udg_left : 0 ) : ( meta.damage_treatment == 'none' ? params.damage_manipulation_bamutils_trim_double_stranded_none_udg_left : meta.damage_treatment == 'half' ? params.damage_manipulation_bamutils_trim_double_stranded_half_udg_left : 0 ) - trim_right = meta.strandedness == 'single' ? ( meta.damage_treatment == 'none' ? 
params.damage_manipulation_bamutils_trim_single_stranded_none_udg_right : meta.damage_treatment == 'half' ? params.damage_manipulation_bamutils_trim_single_stranded_half_udg_right : 0 ) : ( meta.damage_treatment == 'none' ? params.damage_manipulation_bamutils_trim_double_stranded_none_udg_right : meta.damage_treatment == 'half' ? params.damage_manipulation_bamutils_trim_double_stranded_half_udg_right : 0 ) + def trim_left = meta.strandedness == 'single' ? ( meta.damage_treatment == 'none' ? params.damage_manipulation_bamutils_trim_single_stranded_none_udg_left : meta.damage_treatment == 'half' ? params.damage_manipulation_bamutils_trim_single_stranded_half_udg_left : 0 ) : ( meta.damage_treatment == 'none' ? params.damage_manipulation_bamutils_trim_double_stranded_none_udg_left : meta.damage_treatment == 'half' ? params.damage_manipulation_bamutils_trim_double_stranded_half_udg_left : 0 ) + def trim_right = meta.strandedness == 'single' ? ( meta.damage_treatment == 'none' ? params.damage_manipulation_bamutils_trim_single_stranded_none_udg_right : meta.damage_treatment == 'half' ? params.damage_manipulation_bamutils_trim_single_stranded_half_udg_right : 0 ) : ( meta.damage_treatment == 'none' ? params.damage_manipulation_bamutils_trim_double_stranded_none_udg_right : meta.damage_treatment == 'half' ? 
params.damage_manipulation_bamutils_trim_double_stranded_half_udg_right : 0 ) [ meta, bam, trim_left, trim_right ] } diff --git a/subworkflows/local/map.nf b/subworkflows/local/map.nf index 8b30728cf..48f4b067a 100644 --- a/subworkflows/local/map.nf +++ b/subworkflows/local/map.nf @@ -38,10 +38,10 @@ workflow MAP { sharded_reads = SEQKIT_SPLIT2.out.reads .transpose() .map { - meta, reads -> - new_meta = meta.clone() - new_meta.shard_number = reads.getName().replaceAll(/.*(part_\d+).(?:fastq|fq).gz/, '$1') - [ new_meta, reads ] + meta, reads_ -> + def new_meta = meta.clone() + new_meta.shard_number = reads_.getName().replaceAll(/.*(part_\d+).(?:fastq|fq).gz/, '$1') + [ new_meta, reads_ ] } .groupTuple() @@ -52,7 +52,7 @@ workflow MAP { } if ( params.mapping_tool == 'bwaaln' ) { - ch_index_for_mapping = index.map{ meta, index, fasta -> [ meta, index ] } + ch_index_for_mapping = index.map{ meta, index_, fasta -> [ meta, index_ ] } ch_reads_for_mapping = ch_input_for_mapping FASTQ_ALIGN_BWAALN ( ch_reads_for_mapping, ch_index_for_mapping ) @@ -61,7 +61,7 @@ workflow MAP { .map{ // create meta consistent with rest of workflow meta, bam -> - new_meta = meta + [ reference: meta.id_index ] + def new_meta = meta + [ reference: meta.id_index ] [ new_meta, bam ] } @@ -71,10 +71,10 @@ workflow MAP { ch_input_for_mapping = ch_input_for_mapping .combine( index ) .multiMap { - meta, reads, meta2, index, fasta -> - new_meta = meta + [ reference: meta2.id ] - reads: [ new_meta, reads ] - index: [ meta2, index ] + meta, reads_, meta2, index_, fasta -> + def new_meta = meta + [ reference: meta2.id ] + reads: [ new_meta, reads_ ] + index: [ meta2, index_ ] fasta: [ meta2, fasta ] } @@ -88,12 +88,12 @@ workflow MAP { } else if ( params.mapping_tool == 'bowtie2' ) { ch_input_for_mapping = ch_input_for_mapping - .combine( index.map{ meta, index, fasta -> [ meta, index ] } ) + .combine( index.map{ meta, index_, fasta -> [ meta, index_ ] } ) .multiMap { - meta, reads, meta2, index -> - 
new_meta = meta + [ reference: meta2.id ] - reads: [ new_meta, reads ] - index: [ meta2, index ] + meta, reads_, meta2, index_ -> + def new_meta = meta + [ reference: meta2.id ] + reads: [ new_meta, reads_ ] + index: [ meta2, index_ ] } BOWTIE2_ALIGN ( ch_input_for_mapping.reads, ch_input_for_mapping.index, false, true ) @@ -117,12 +117,12 @@ workflow MAP { ch_mapped_lane_bai = CIRCULARMAPPER.out.bai // [ [ meta ], bai/csi ] } else if ( params.mapping_tool == 'mapad' ) { ch_input_for_mapping = ch_input_for_mapping - .combine( index.map{ meta, index, fasta -> [ meta, index ] } ) + .combine( index.map{ meta, index_, fasta -> [ meta, index_ ] } ) .multiMap { - meta, reads, meta2, index -> - new_meta = meta + [ reference: meta2.id ] - reads: [ new_meta, reads ] - index: [ meta2, index ] + meta, reads_, meta2, index_ -> + def new_meta = meta + [ reference: meta2.id ] + reads: [ new_meta, reads_ ] + index: [ meta2, index_ ] strandedness: meta.strandedness=="double" } @@ -159,7 +159,7 @@ workflow MAP { .flatMap() .map { meta, bam -> - new_meta = meta.clone().findAll{ it.key !in ['lane', 'colour_chemistry', 'shard_number'] } + def new_meta = meta.clone().findAll{ it.key !in ['lane', 'colour_chemistry', 'shard_number'] } [ new_meta, bam ] } .groupTuple() diff --git a/subworkflows/local/metagenomics_postprocessing.nf b/subworkflows/local/metagenomics_postprocessing.nf index 19ea332c0..5cf4f5fa8 100644 --- a/subworkflows/local/metagenomics_postprocessing.nf +++ b/subworkflows/local/metagenomics_postprocessing.nf @@ -6,9 +6,10 @@ include { MEGAN_RMA2INFO } from '../../modules/nf-core/megan/rma2info/main workflow METAGENOMICS_POSTPROCESSING { - take: ch_postprocessing_input // different between each profiling --> postprocessing tool, defined in metagenomics profiling subworkflow - take: ch_tax_list - take: ch_ncbi_dir + take: + ch_postprocessing_input // different between each profiling --> postprocessing tool, defined in metagenomics profiling subworkflow + ch_tax_list + 
ch_ncbi_dir main: ch_versions = Channel.empty() diff --git a/subworkflows/local/metagenomics_profiling.nf b/subworkflows/local/metagenomics_profiling.nf index 6ad39b7e4..2400d28ab 100644 --- a/subworkflows/local/metagenomics_profiling.nf +++ b/subworkflows/local/metagenomics_profiling.nf @@ -78,14 +78,16 @@ workflow METAGENOMICS_PROFILING { // could be prevented by branching early and running the lower part twice for ss and ds individually // but this is an edge-case and might never be relevant... ch_input_for_malt = ch_reads.combine(ch_tmp_groups).map{ meta, reads, n_groups -> - [ + def result = [ [ label: label, strandedness:meta.strandedness, - id:"${meta.strandedness}stranded_${groups_counter++%n_groups}" + id:"${meta.strandedness}stranded_${groups_counter % n_groups}" ], reads ] + groups_counter += 1 + return result } .groupTuple(by:0) diff --git a/subworkflows/local/preprocessing_adapterremoval.nf b/subworkflows/local/preprocessing_adapterremoval.nf index 45778b266..752cd43ae 100644 --- a/subworkflows/local/preprocessing_adapterremoval.nf +++ b/subworkflows/local/preprocessing_adapterremoval.nf @@ -45,10 +45,10 @@ workflow PREPROCESSING_ADAPTERREMOVAL { ADAPTERREMOVAL_PAIRED.out.singles_truncated, ADAPTERREMOVAL_PAIRED.out.paired_truncated ) - .map { meta, reads -> + .map { meta, reads_ -> def meta_new = meta.clone() meta_new.single_end = true - [meta_new, reads] + [meta_new, reads_] } .groupTuple() // Paired-end reads cause a nested tuple during grouping. 
@@ -69,10 +69,10 @@ workflow PREPROCESSING_ADAPTERREMOVAL { ADAPTERREMOVAL_PAIRED.out.collapsed, ADAPTERREMOVAL_PAIRED.out.collapsed_truncated ) - .map { meta, reads -> + .map { meta, reads_ -> def meta_new = meta.clone() meta_new.single_end = true - [meta_new, reads] + [meta_new, reads_] } .groupTuple() .map { meta, fastq -> [meta, fastq.flatten()] } diff --git a/subworkflows/local/preprocessing_fastp.nf b/subworkflows/local/preprocessing_fastp.nf index 780fe84e9..d25ff01c0 100644 --- a/subworkflows/local/preprocessing_fastp.nf +++ b/subworkflows/local/preprocessing_fastp.nf @@ -32,10 +32,10 @@ workflow PREPROCESSING_FASTP { if ( !params.preprocessing_skippairmerging ) { ch_fastp_reads_prepped_pe = FASTP_PAIRED.out.reads_merged .map { - meta, reads -> + meta, reads_ -> def meta_new = meta.clone() meta_new['single_end'] = true - [ meta_new, [ reads ].flatten() ] + [ meta_new, [ reads_ ].flatten() ] } ch_fastp_reads_prepped = ch_fastp_reads_prepped_pe.mix( FASTP_SINGLE.out.reads ) diff --git a/subworkflows/local/reference_indexing_single.nf b/subworkflows/local/reference_indexing_single.nf index d98834376..0ddc4bf57 100644 --- a/subworkflows/local/reference_indexing_single.nf +++ b/subworkflows/local/reference_indexing_single.nf @@ -87,7 +87,7 @@ workflow REFERENCE_INDEXING_SINGLE { .join(ch_fasta_dict, failOnMismatch: true) .join(ch_fasta_mapperindexdir, failOnMismatch: true) .map{ - meta, fasta, fai, dict, mapper_index -> + meta, fasta_, fai, dict, mapper_index -> def contamination_estimation_angsd_hapmap = params.contamination_estimation_angsd_hapmap != null ? file( params.contamination_estimation_angsd_hapmap, checkIfExists: true ) : "" def pmd_masked_fasta = params.damage_manipulation_pmdtools_masked_reference != null ? file(params.damage_manipulation_pmdtools_masked_reference, checkIfExists: true ) : "" def pmd_bed_for_masking = params.damage_manipulation_pmdtools_reference_mask != null ? 
file(params.damage_manipulation_pmdtools_reference_mask, checkIfExists: true ) : "" @@ -100,13 +100,13 @@ workflow REFERENCE_INDEXING_SINGLE { def genotyping_gatk_dbsnp = params.genotyping_gatk_dbsnp != null ? file(params.genotyping_gatk_dbsnp, checkIfExists: true ) : "" def circularmapper_elongated_fasta = params.fasta_circularmapper_elongatedfasta != null ? file( params.fasta_circularmapper_elongatedfasta, checkIfExists: true ) : "" def circularmapper_elongated_index = params.fasta_circularmapper_elongatedindex != null ? file( params.fasta_circularmapper_elongatedindex, checkIfExists: true ) : "" - [ meta + [ ploidy: genotyping_reference_ploidy ], fasta, fai, dict, mapper_index, params.fasta_circular_target, params.mitochondrion_header, contamination_estimation_angsd_hapmap, pmd_masked_fasta, pmd_bed_for_masking, capture_bed, pileupcaller_bed, pileupcaller_snp, sexdet_bed, bedtools_feature, genotyping_gatk_dbsnp, circularmapper_elongated_fasta, circularmapper_elongated_index ] + [ meta + [ ploidy: genotyping_reference_ploidy ], fasta_, fai, dict, mapper_index, params.fasta_circular_target, params.mitochondrion_header, contamination_estimation_angsd_hapmap, pmd_masked_fasta, pmd_bed_for_masking, capture_bed, pileupcaller_bed, pileupcaller_snp, sexdet_bed, bedtools_feature, genotyping_gatk_dbsnp, circularmapper_elongated_fasta, circularmapper_elongated_index ] } ch_ref_index_single = ch_reference_for_mapping .multiMap{ - meta, fasta, fai, dict, mapper_index, circular_target, mitochondrion_header, contamination_estimation_angsd_hapmap, pmd_masked_fasta, pmd_bed_for_masking, capture_bed, pileupcaller_bed, pileupcaller_snp, sexdet_bed, bedtools_feature, genotyping_gatk_dbsnp, circularmapper_elongated_fasta, circularmapper_elongated_index -> - reference: [ meta, fasta, fai, dict, mapper_index ] + meta, fasta_, fai, dict, mapper_index, circular_target, mitochondrion_header, contamination_estimation_angsd_hapmap, pmd_masked_fasta, pmd_bed_for_masking, capture_bed, 
pileupcaller_bed, pileupcaller_snp, sexdet_bed, bedtools_feature, genotyping_gatk_dbsnp, circularmapper_elongated_fasta, circularmapper_elongated_index -> + reference: [ meta, fasta_, fai, dict, mapper_index ] circularmapper: [ meta, circular_target, circularmapper_elongated_fasta, circularmapper_elongated_index ] mito_header: [ meta, mitochondrion_header ] hapmap: [ meta, contamination_estimation_angsd_hapmap ] diff --git a/subworkflows/local/utils_nfcore_eager_pipeline/main.nf b/subworkflows/local/utils_nfcore_eager_pipeline/main.nf index b562b355b..0666b2f97 100644 --- a/subworkflows/local/utils_nfcore_eager_pipeline/main.nf +++ b/subworkflows/local/utils_nfcore_eager_pipeline/main.nf @@ -119,7 +119,7 @@ workflow PIPELINE_INITIALISATION { ch_samplesheet_fastqs = ch_samplesheet_for_branch.fastq .map { meta, r1, r2, bam -> - reads = meta.single_end ? [ r1 ] : [ r1, r2 ] + def reads = meta.single_end ? [ r1 ] : [ r1, r2 ] [ meta - meta.subMap('pairment', 'bam_reference_id'), reads ] } @@ -417,7 +417,7 @@ def addNewMetaFromAttributes( ArrayList row, Object source_attributes, Object ta } else if ((source_attributes instanceof List) && (target_attributes instanceof List)) { if (source_attributes.size() == target_attributes.size()) { - for (int i = 0; i < source_attributes.size(); i++) { + (0..<source_attributes.size()).each { i -> // Option B: Both are lists of same size meta2[target_attributes[i]] = meta[source_attributes[i]] } } diff --git a/subworkflows/nf-core/bam_split_by_region/main.nf b/subworkflows/nf-core/bam_split_by_region/main.nf index 0ec700b14..d202d15e4 100644 --- a/subworkflows/nf-core/bam_split_by_region/main.nf +++ b/subworkflows/nf-core/bam_split_by_region/main.nf @@ -25,12 +25,19 @@ workflow BAM_SPLIT_BY_REGION { } .splitCsv ( header: ['seq_name', 'start', 'stop'], sep:'\t', elem: 1) .map{ meta, stats -> + def chrom // If the regions file contains just a sequence name provide that - if (! 
stats['start'] ) [ chrom = stats['seq_name'] ] + if (! stats['start'] ) { + chrom = stats['seq_name'] + } // If a specific position is given, use that - else if ( ! stats['stop']) [ chrom = [ stats['seq_name'], stats['start'] ].join(":") ] + else if ( ! stats['stop']) { + chrom = [ stats['seq_name'], stats['start'] ].join(":") + } // If a region between specific bps is requested use that - else [ chrom = [ [ stats['seq_name'], stats['start'] ].join(":"), stats['stop'] ].join('-') ] + else { + chrom = [ [ stats['seq_name'], stats['start'] ].join(":"), stats['stop'] ].join('-') + } [ meta, chrom ] } diff --git a/workflows/eager.nf b/workflows/eager.nf index e613d5cd5..c8b5a72e0 100644 --- a/workflows/eager.nf +++ b/workflows/eager.nf @@ -316,14 +316,14 @@ workflow EAGER { ch_bam_for_host_removal = MAP.out.bam .join(MAP.out.bai) .map { meta, bam, bai -> - new_meta = meta.clone().findAll { it.key !in ['single_end', 'reference'] } + def new_meta = meta.clone().findAll { it.key !in ['single_end', 'reference'] } [new_meta, meta, bam, bai] } // Preparing fastq channel for host removal to be combined with the bam channel // The meta of the fastq channel contains additional fields when compared to the meta from the bam channel: lane, colour_chemistry, // and not necessarily matching single_end. Those fields are dropped of the meta in the map and stored in new_meta ch_fastqs_for_host_removal = ch_fastqs_for_preprocessing.map { meta, fastqs -> - new_meta = meta.clone().findAll { it.key !in ['lane', 'colour_chemistry', 'single_end'] } + def new_meta = meta.clone().findAll { it.key !in ['lane', 'colour_chemistry', 'single_end'] } [new_meta, meta, fastqs] } // We join the bam and fastq channel with now matching metas (new_meta) referred as meta_join