Skip to content

Commit f6d4c13

Browse files
authored
Merge pull request #17 from nf-core/dev
Dev
2 parents 3d58894 + 5d56e6e commit f6d4c13

5 files changed

Lines changed: 46 additions & 32 deletions

File tree

.github/workflows/branch.yml

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -7,7 +7,7 @@ on:
77

88
jobs:
99
test:
10-
runs-on: ubuntu-latest
10+
runs-on: ubuntu-18.04
1111
steps:
1212
# PRs are only ok if coming from an nf-core dev branch
1313
- name: Check PRs
@@ -24,4 +24,4 @@ jobs:
2424
echo "GITHUB_REF: ${GITHUB_REF}"
2525
echo "GITHUB_HEAD_REF: ${GITHUB_HEAD_REF}"
2626
echo "GITHUB_BASE_REF: ${GITHUB_BASE_REF}"
27-
[ ${GITHUB_ACTOR} = "nf-core" ] && [ ${GITHUB_HEAD_REF} = "dev" ]
27+
[ ${GITHUB_ACTOR} = "nf-core" ] && [ ${GITHUB_HEAD_REF} = "dev" ]

.github/workflows/linting.yml

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -4,7 +4,7 @@ on: [push, pull_request]
44

55
jobs:
66
Markdown:
7-
runs-on: ubuntu-latest
7+
runs-on: ubuntu-18.04
88
steps:
99
- uses: actions/checkout@v1
1010
- uses: actions/setup-node@v1
@@ -37,4 +37,4 @@ jobs:
3737
pip install nf-core
3838
- name: Run nf-core lint
3939
run: |
40-
nf-core lint ${GITHUB_WORKSPACE}
40+
nf-core lint ${GITHUB_WORKSPACE}

.github/workflows/nf-core_eager.yml

Lines changed: 36 additions & 25 deletions
Original file line numberDiff line numberDiff line change
@@ -3,10 +3,17 @@ name: nf-core eager CI
33
on: [push, pull_request]
44

55
jobs:
6+
conda_build:
7+
runs-on: ubuntu-18.04
8+
steps:
9+
- uses: actions/checkout@v1
10+
- name: Try Creating Conda env
11+
run: |
12+
conda env create --prefix nf-core-eager-2.1.0dev-d95a13feb408cc17e95d38f16a81010d --file environment.yml
613
github_actions_ci:
714
runs-on: ubuntu-latest
815
env:
9-
TOWER_ACCESS_TOKEN: 1c1f493bc2703472d6f1b9f6fb9e9d117abab7b1
16+
TOWER_ACCESS_TOKEN: ${{ secrets.TOWER_ACCESS_TOKEN }}
1017
NXF_ANSI_LOG: 0
1118
strategy:
1219
matrix:
@@ -21,74 +28,78 @@ jobs:
2128
shell: bash
2229
run: echo "::set-env name=RUN_NAME::`echo ${GITHUB_REPOSITORY//\//_}`-`echo ${GITHUB_HEAD_REF//\//@} | rev | cut -f1 -d@ | rev`-${{ github.event_name }}-`echo ${GITHUB_SHA} | cut -c1-6`"
2330
id: extract_branch
31+
- name: Determine tower usage
32+
shell: bash
33+
run: echo "::set-env name=TOWER::`[ -z "$TOWER_ACCESS_TOKEN" ] && echo '' || echo '-with-tower'`"
34+
id: tower_usage
2435
- name: BASIC Run the basic pipeline with the test profile: Basic workflow, PE/SE, bwa aln
2536
run: |
26-
nextflow run ${GITHUB_WORKSPACE} -with-tower -name "$RUN_NAME-basic" -profile test,docker ${{ matrix.endedness }} --saveReference
37+
nextflow run ${GITHUB_WORKSPACE} "$TOWER" -name "$RUN_NAME-basic" -profile test,docker ${{ matrix.endedness }} --saveReference
2738
- name: REFERENCE Basic workflow, with supplied indices
2839
run: |
29-
nextflow run ${GITHUB_WORKSPACE} -with-tower -name "$RUN_NAME-preindex_ref" -profile test,docker ${{ matrix.endedness }} --bwa_index results/reference_genome/bwa_index/BWAIndex/Mammoth_MT_Krause.fasta
40+
nextflow run ${GITHUB_WORKSPACE} "$TOWER" -name "$RUN_NAME-preindex_ref" -profile test,docker ${{ matrix.endedness }} --bwa_index results/reference_genome/bwa_index/BWAIndex/Mammoth_MT_Krause.fasta
3041
- name: REFERENCE Run the basic pipeline with FastA reference with `fna` extension
3142
run: |
32-
nextflow run ${GITHUB_WORKSPACE} -with-tower -name "$RUN_NAME-fna_ref" -profile test_fna,docker --pairedEnd
43+
nextflow run ${GITHUB_WORKSPACE} "$TOWER" -name "$RUN_NAME-fna_ref" -profile test_fna,docker --pairedEnd
3344
- name: REFERENCE Test with zipped reference input
3445
run: |
35-
nextflow run ${GITHUB_WORKSPACE} -with-tower -name "$RUN_NAME-gz_ref" -profile test,docker --pairedEnd --fasta 'https://github.com/jfy133/test-datasets/raw/eager/reference/Mammoth/Mammoth_MT_Krause.fasta.gz'
46+
nextflow run ${GITHUB_WORKSPACE} "$TOWER" -name "$RUN_NAME-gz_ref" -profile test,docker --pairedEnd --fasta 'https://github.com/jfy133/test-datasets/raw/eager/reference/Mammoth/Mammoth_MT_Krause.fasta.gz'
3647
- name: FASTP Test fastp complexity filtering
3748
run: |
38-
nextflow run ${GITHUB_WORKSPACE} -with-tower -name "$RUN_NAME-fastp" -profile test,docker --pairedEnd --complexity_filter
49+
nextflow run ${GITHUB_WORKSPACE} "$TOWER" -name "$RUN_NAME-fastp" -profile test,docker --pairedEnd --complexity_filter
3950
- name: ADAPTERREMOVAL Test skip pairedEnd collapsing
4051
run: |
41-
nextflow run ${GITHUB_WORKSPACE} -with-tower -name "$RUN_NAME-skip_collapse" -profile test,docker --pairedEnd --skip_collapse
52+
nextflow run ${GITHUB_WORKSPACE} "$TOWER" -name "$RUN_NAME-skip_collapse" -profile test,docker --pairedEnd --skip_collapse
4253
- name: ADAPTERREMOVAL Test pairedEnd collapsing but no trimming
4354
run: |
44-
nextflow run ${GITHUB_WORKSPACE} -with-tower -name "$RUN_NAME-pretrim" -profile test_pretrim,docker --pairedEnd --skip_trim
55+
nextflow run ${GITHUB_WORKSPACE} "$TOWER" -name "$RUN_NAME-pretrim" -profile test_pretrim,docker --pairedEnd --skip_trim
4556
- name: ADAPTERREMOVAL Run the basic pipeline with paired end data without adapterRemoval
4657
run: |
47-
nextflow run ${GITHUB_WORKSPACE} -with-tower -name "$RUN_NAME-skip_adapterremoval" -profile test,docker --pairedEnd --skip_adapterremoval
58+
nextflow run ${GITHUB_WORKSPACE} "$TOWER" -name "$RUN_NAME-skip_adapterremoval" -profile test,docker --pairedEnd --skip_adapterremoval
4859
- name: ADAPTERREMOVAL Run the basic pipeline with preserve5p end option
4960
run: |
50-
nextflow run ${GITHUB_WORKSPACE} -with-tower -name "$RUN_NAME-preserve5p" -profile test,docker --pairedEnd --preserve5p
61+
nextflow run ${GITHUB_WORKSPACE} "$TOWER" -name "$RUN_NAME-preserve5p" -profile test,docker --pairedEnd --preserve5p
5162
- name: ADAPTERREMOVAL Run the basic pipeline with merged only option
5263
run: |
53-
nextflow run ${GITHUB_WORKSPACE} -with-tower -name "$RUN_NAME-mergedonly" -profile test,docker --pairedEnd --mergedonly
64+
nextflow run ${GITHUB_WORKSPACE} "$TOWER" -name "$RUN_NAME-mergedonly" -profile test,docker --pairedEnd --mergedonly
5465
- name: ADAPTERREMOVAL Run the basic pipeline with preserve5p end and merged reads only options
5566
run: |
56-
nextflow run ${GITHUB_WORKSPACE} -with-tower -name "$RUN_NAME-preserve5p_mergedonly" -profile test,docker --pairedEnd --preserve5p --mergedonly
67+
nextflow run ${GITHUB_WORKSPACE} "$TOWER" -name "$RUN_NAME-preserve5p_mergedonly" -profile test,docker --pairedEnd --preserve5p --mergedonly
5768
- name: MAPPER_CIRCULARMAPPER Test running with CircularMapper
5869
run: |
59-
nextflow run ${GITHUB_WORKSPACE} -with-tower -name "$RUN_NAME-circularmapper" -profile test,docker --pairedEnd --mapper 'circularmapper' --circulartarget 'NC_007596.2'
70+
nextflow run ${GITHUB_WORKSPACE} "$TOWER" -name "$RUN_NAME-circularmapper" -profile test,docker --pairedEnd --mapper 'circularmapper' --circulartarget 'NC_007596.2'
6071
- name: MAPPER_BWAMEM Test running with BWA Mem
6172
run: |
62-
nextflow run ${GITHUB_WORKSPACE} -with-tower -name "$RUN_NAME-bwa_mem" -profile test,docker --pairedEnd --mapper 'bwamem'
73+
nextflow run ${GITHUB_WORKSPACE} "$TOWER" -name "$RUN_NAME-bwa_mem" -profile test,docker --pairedEnd --mapper 'bwamem'
6374
- name: STRIP_FASTQ Run the basic pipeline with output unmapped reads as fastq
6475
run: |
65-
nextflow run ${GITHUB_WORKSPACE} -with-tower -name "$RUN_NAME-stripfastq" -profile test,docker --pairedEnd --strip_input_fastq
76+
nextflow run ${GITHUB_WORKSPACE} "$TOWER" -name "$RUN_NAME-stripfastq" -profile test,docker --pairedEnd --strip_input_fastq
6677
- name: BAM_FILTERING Run basic mapping pipeline with mapping quality filtering, and unmapped export
6778
run: |
68-
nextflow run ${GITHUB_WORKSPACE} -with-tower -name "$RUN_NAME-unmapped_export" -profile test,docker --pairedEnd --run_bam_filtering --bam_mapping_quality_threshold 37 --bam_discard_umapped --bam_unmapped_type 'fastq'
79+
nextflow run ${GITHUB_WORKSPACE} "$TOWER" -name "$RUN_NAME-unmapped_export" -profile test,docker --pairedEnd --run_bam_filtering --bam_mapping_quality_threshold 37 --bam_discard_umapped --bam_unmapped_type 'fastq'
6980
- name: GENOTYPING_HC Test running GATK HaplotypeCaller
7081
run: |
71-
nextflow run ${GITHUB_WORKSPACE} -with-tower -name "$RUN_NAME-haplotypercaller" -profile test_fna,docker --pairedEnd --dedupper 'dedup' --run_genotyping --genotyping_tool 'hc' --gatk_out_mode 'EMIT_ALL_SITES' --gatk_hc_emitrefconf 'BP_RESOLUTION'
82+
nextflow run ${GITHUB_WORKSPACE} "$TOWER" -name "$RUN_NAME-haplotypercaller" -profile test_fna,docker --pairedEnd --dedupper 'dedup' --run_genotyping --genotyping_tool 'hc' --gatk_out_mode 'EMIT_ALL_SITES' --gatk_hc_emitrefconf 'BP_RESOLUTION'
7283
- name: GENOTYPING_FB Test running FreeBayes
7384
run: |
74-
nextflow run ${GITHUB_WORKSPACE} -with-tower -name "$RUN_NAME-freebayes" -profile test,docker --pairedEnd --dedupper 'dedup' --run_genotyping --genotyping_tool 'freebayes'
85+
nextflow run ${GITHUB_WORKSPACE} "$TOWER" -name "$RUN_NAME-freebayes" -profile test,docker --pairedEnd --dedupper 'dedup' --run_genotyping --genotyping_tool 'freebayes'
7586
- name: SKIPPING Test checking all skip steps work i.e. input bam, skipping straight to genotyping
7687
run: |
77-
nextflow run ${GITHUB_WORKSPACE} -with-tower -name "$RUN_NAME-skipping_logic" -profile test_bam,docker --bam --singleEnd --skip_fastqc --skip_adapterremoval --skip_mapping --skip_deduplication --skip_qualimap --skip_preseq --skip_damage_calculation --run_genotyping --genotyping_tool 'freebayes'
88+
nextflow run ${GITHUB_WORKSPACE} "$TOWER" -name "$RUN_NAME-skipping_logic" -profile test_bam,docker --bam --singleEnd --skip_fastqc --skip_adapterremoval --skip_mapping --skip_deduplication --skip_qualimap --skip_preseq --skip_damage_calculation --run_genotyping --genotyping_tool 'freebayes'
7889
- name: TRIM_BAM/PMD/GENOTYPING_UG/MULTIVCFANALYZER Test running PMDTools, TrimBam, GATK UnifiedGenotyper and MultiVCFAnalyzer
7990
run: |
80-
nextflow run ${GITHUB_WORKSPACE} -with-tower -name "$RUN_NAME-pmd_trimbam_unifiedgenotyper_multivcfanalyzer" -profile test,docker --pairedEnd --dedupper 'dedup' --run_trim_bam --run_pmdtools --run_genotyping --genotyping_source 'trimmed' --genotyping_tool 'ug' --gatk_out_mode 'EMIT_ALL_SITES' --gatk_ug_genotype_model 'SNP' --run_multivcfanalyzer
91+
nextflow run ${GITHUB_WORKSPACE} "$TOWER" -name "$RUN_NAME-pmd_trimbam_unifiedgenotyper_multivcfanalyzer" -profile test,docker --pairedEnd --dedupper 'dedup' --run_trim_bam --run_pmdtools --run_genotyping --genotyping_source 'trimmed' --genotyping_tool 'ug' --gatk_out_mode 'EMIT_ALL_SITES' --gatk_ug_genotype_model 'SNP' --run_multivcfanalyzer
8192
- name: GENOTYPING_UG/PMD/MULTIVCFANALYZER Test running GATK UnifiedGenotyper and MultiVCFAnalyzer, additional VCFS
8293
run: |
83-
nextflow run ${GITHUB_WORKSPACE} -with-tower -name "$RUN_NAME-multivcfanalyzer_additionalvcfs" -profile test,docker --pairedEnd --dedupper 'dedup' --run_genotyping --genotyping_tool 'ug' --gatk_out_mode 'EMIT_ALL_SITES' --gatk_ug_genotype_model 'SNP' --run_multivcfanalyzer --additional_vcf_files 'https://raw.githubusercontent.com/jfy133/test-datasets/eager/testdata/Mammoth/vcf/JK2772_CATCAGTGAGTAGA_L008_R1_001.fastq.gz.tengrand.fq.combined.fq.mapped_rmdup.bam.unifiedgenotyper.vcf.gz' --write_allele_frequencies
94+
nextflow run ${GITHUB_WORKSPACE} "$TOWER" -name "$RUN_NAME-multivcfanalyzer_additionalvcfs" -profile test,docker --pairedEnd --dedupper 'dedup' --run_genotyping --genotyping_tool 'ug' --gatk_out_mode 'EMIT_ALL_SITES' --gatk_ug_genotype_model 'SNP' --run_multivcfanalyzer --additional_vcf_files 'https://raw.githubusercontent.com/jfy133/test-datasets/eager/testdata/Mammoth/vcf/JK2772_CATCAGTGAGTAGA_L008_R1_001.fastq.gz.tengrand.fq.combined.fq.mapped_rmdup.bam.unifiedgenotyper.vcf.gz' --write_allele_frequencies
8495
- name: BAM_INPUT Run the basic pipeline with the bam input profile, skip AdapterRemoval as no convertBam
8596
run: |
86-
nextflow run ${GITHUB_WORKSPACE} -with-tower -name "$RUN_NAME-baminput_noConvertBam" -profile test_bam,docker --bam --skip_adapterremoval --run_convertbam
97+
nextflow run ${GITHUB_WORKSPACE} "$TOWER" -name "$RUN_NAME-baminput_noConvertBam" -profile test_bam,docker --bam --skip_adapterremoval --run_convertbam
8798
- name: BAM_INPUT Run the basic pipeline with the bam input profile, convert to FASTQ for adapterremoval test and downstream
8899
run: |
89-
nextflow run ${GITHUB_WORKSPACE} -with-tower -name "$RUN_NAME-baminput_convertbam_basic" -profile test_bam,docker --bam --run_convertbam
100+
nextflow run ${GITHUB_WORKSPACE} "$TOWER" -name "$RUN_NAME-baminput_convertbam_basic" -profile test_bam,docker --bam --run_convertbam
90101
#- name: [DISABLED UNTIL BED FILE AVAILABLE - 2h RUN TIME WITHOUT] - SEXDETERMINATION Run the basic pipeline with the bam input profile, but don't convert BAM, skip everything but sex determination
91-
#nextflow run ${GITHUB_WORKSPACE} -with-tower -profile test_humanbam,docker --bam --skip_fastqc --skip_adapterremoval --skip_mapping --skip_deduplication --skip_qualimap --singleEnd --run_sexdeterrmine
102+
#nextflow run ${GITHUB_WORKSPACE} "$TOWER" -profile test_humanbam,docker --bam --skip_fastqc --skip_adapterremoval --skip_mapping --skip_deduplication --skip_qualimap --singleEnd --run_sexdeterrmine
92103
#- name: [DISABLED UNTIL SMALL HUMAN REFERENCE AVAILABLE - REQUIRES HUMAN FASTA] - NUCLEAR INPUT
93-
#nextflow run ${GITHUB_WORKSPACE} -with-tower -profile test_humanbam,docker --bam --skip_fastqc --skip_adapterremoval --skip_mapping --skip_deduplication --skip_qualimap --singleEnd --run_nuclearcontamination
104+
#nextflow run ${GITHUB_WORKSPACE} "$TOWER" -profile test_humanbam,docker --bam --skip_fastqc --skip_adapterremoval --skip_mapping --skip_deduplication --skip_qualimap --singleEnd --run_nuclearcontamination
94105

bin/print_x_contamination.py

Lines changed: 5 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -26,13 +26,16 @@
2626
err_mom1=fields[4].split(":")[1]
2727
ml1=fields[5].split(":")[1]
2828
err_ml1=fields[6].split(":")[1]
29+
## Sometimes angsd fails to run method 2, and the error is printed directly after the SE for ML. When that happens, exclude the first word in the error from the output. (Method 2 data will be shown as NA)
30+
if err_ml1.endswith("contamination"):
31+
err_ml1 = err_ml1[:-13]
2932
elif line.strip()[0:7] == "Method2" and line.strip()[9:16] == 'new_llh':
3033
mom2=fields[3].split(":")[1]
3134
err_mom2=fields[4].split(":")[1]
3235
ml2=fields[5].split(":")[1]
3336
err_ml2=fields[6].split(":")[1]
34-
data[Ind]={ "Method1_MOM_estimate" : mom1, "Method1_MOM_SE" : err_mom1, "Method1_ML_estimate" : ml1, "Method1_ML_SE" : err_ml1, "Method2_MOM_estimate" : mom2, "Method2_MOM_SE" : err_mom2, "Method2_ML_estimate" : ml2, "Method2_ML_SE" : err_ml2 }
35-
print (Ind, mom1, err_mom1, ml1, err_ml1, mom2, err_mom2, ml2, err_ml2, sep="\t", file=output)
37+
data[Ind]={ "Number_of_SNPs" : nSNPs, "Method1_MOM_estimate" : mom1, "Method1_MOM_SE" : err_mom1, "Method1_ML_estimate" : ml1, "Method1_ML_SE" : err_ml1, "Method2_MOM_estimate" : mom2, "Method2_MOM_SE" : err_mom2, "Method2_ML_estimate" : ml2, "Method2_ML_SE" : err_ml2 }
38+
print (Ind, nSNPs, mom1, err_mom1, ml1, err_ml1, mom2, err_mom2, ml2, err_ml2, sep="\t", file=output)
3639

3740
with open('nuclear_contamination.json', 'w') as outfile:
3841
json.dump(data, outfile)

environment.yml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -4,7 +4,7 @@ channels:
44
- bioconda
55
- conda-forge
66
dependencies:
7-
- anaconda::openjdk=8.0.152
7+
- conda-forge::openjdk=8.0.144
88
- bioconda::fastqc=0.11.8
99
- bioconda::adapterremoval=2.3.1
1010
- bioconda::adapterremovalfixprefix=0.0.5

0 commit comments

Comments
 (0)