Merge pull request #526 from nf-core/update-profiles
Ensure alternative long-read processing tools included in tests
jfy133 authored Sep 13, 2024
2 parents 1247a77 + 586514a commit 9ad901d
Showing 14 changed files with 262 additions and 259 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/ci.yml
@@ -36,7 +36,7 @@ jobs:
           - "test_motus"
           - "test_falco"
           - "test_fastp"
-          - "test_adapterremoval"
+          - "test_alternativepreprocessing"
           - "test_bbduk"
           - "test_prinseqplusplus"
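The CI step that consumes these matrix tags is outside this hunk; in nf-core pipelines each tag is passed to Nextflow as the profile name, so the renamed entry corresponds to an invocation along the lines of the following (illustrative only; the exact flags used by ci.yml are not shown in this diff):

    nextflow run nf-core/taxprofiler -profile test_alternativepreprocessing,docker --outdir ./results

The tag therefore only resolves once a config profile of the same name exists, which is what the new conf/test_alternativepreprocessing.config further down provides.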
12 changes: 6 additions & 6 deletions conf/test.config
@@ -11,17 +11,17 @@
 */

 params {
-    config_profile_name = 'Test profile'
-    config_profile_description = 'Minimal test dataset to check pipeline function'
+    config_profile_name = 'Test profile'
+    config_profile_description = 'Minimal test dataset to check pipeline function'

     // Limit resources so that this can run on GitHub Actions
-    max_cpus = 2
-    max_memory = '6.GB'
-    max_time = '6.h'
+    max_cpus = 2
+    max_memory = '6.GB'
+    max_time = '6.h'

     // Input data
     input = params.pipelines_testdata_base_path + 'taxprofiler/samplesheet.csv'
-    databases = params.pipelines_testdata_base_path + 'taxprofiler/database_v1.1.csv'
+    databases = params.pipelines_testdata_base_path + 'taxprofiler/database_v1.2.csv'
     perform_shortread_qc = true
     perform_shortread_redundancyestimation = true
     perform_longread_qc = true

54 changes: 0 additions & 54 deletions conf/test_adapterremoval.config

This file was deleted.

56 changes: 56 additions & 0 deletions conf/test_alternativepreprocessing.config
@@ -0,0 +1,56 @@
+/*
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+    Nextflow config file for running minimal tests
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+    Defines input files and everything required to run a fast and simple pipeline test.
+
+    Use as follows:
+        nextflow run nf-core/taxprofiler -profile test,<docker/singularity> --outdir <OUTDIR>
+
+----------------------------------------------------------------------------------------
+*/
+
+params {
+    config_profile_name = 'Test profile for adapterremoval'
+    config_profile_description = "Minimal test to check the alternative short-read QC function, adapterremoval"
+
+    // Limit resources so that this can run on GitHub Actions
+    max_cpus = 2
+    max_memory = '6.GB'
+    max_time = '6.h'
+
+    // Input data
+    input = params.pipelines_testdata_base_path + 'taxprofiler/samplesheet.csv'
+    databases = params.pipelines_testdata_base_path + 'taxprofiler/database_v1.2.csv'
+    perform_shortread_qc = true
+    perform_longread_qc = true
+    shortread_qc_tool = 'adapterremoval'
+    longread_adapterremoval_tool = 'porechop'
+    longread_filter_tool = 'filtlong'
+    perform_shortread_complexityfilter = true
+    perform_shortread_hostremoval = true
+    perform_longread_hostremoval = true
+    perform_runmerging = true
+    hostremoval_reference = params.pipelines_testdata_base_path + 'modules/data/genomics/homo_sapiens/genome/genome.fasta'
+    run_kaiju = true
+    run_kraken2 = true
+    run_bracken = false
+    run_malt = false
+    run_metaphlan = false
+    run_centrifuge = false
+    run_diamond = false
+    run_krakenuniq = false
+    run_motus = false
+    run_ganon = false
+    run_kmcp = false
+}
+
+process {
+    withName: MALT_RUN {
+        maxForks = 1
+        ext.args = { "-m ${params.malt_mode} -J-Xmx12G" }
+    }
+    withName: NONPAREIL_NONPAREIL {
+        ext.args = { "-k 5" }
+    }
+}
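Outside of CI, the tool selection exercised by this profile can be reproduced on a user's own data by supplying the same parameters as command-line flags, using the standard Nextflow convention that any entry in the params block can be set with a double-dash option. A sketch, with placeholder samplesheet, database sheet, and output paths rather than files from this repository:

    nextflow run nf-core/taxprofiler -profile docker \
        --input samplesheet.csv \
        --databases databases.csv \
        --outdir ./results \
        --perform_shortread_qc --shortread_qc_tool adapterremoval \
        --perform_longread_qc \
        --longread_adapterremoval_tool porechop \
        --longread_filter_tool filtlong

This is what the commit message targets: by switching the test from test_adapterremoval to test_alternativepreprocessing, the alternative long-read tools named here (porechop for adapter trimming, filtlong for read filtering) are covered by CI alongside the alternative short-read trimmer, adapterremoval.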
52 changes: 26 additions & 26 deletions conf/test_bbduk.config
@@ -11,36 +11,36 @@
 */

 params {
-    config_profile_name = 'Test profile for bbduk'
-    config_profile_description = "Minimal test to check the default tool of short-read complexity filtering, bbduk"
+    config_profile_name = 'Test profile for bbduk'
+    config_profile_description = "Minimal test to check the default tool of short-read complexity filtering, bbduk"

     // Limit resources so that this can run on GitHub Actions
-    max_cpus = 2
-    max_memory = '6.GB'
-    max_time = '6.h'
+    max_cpus = 2
+    max_memory = '6.GB'
+    max_time = '6.h'

     // Input data
-    input = params.pipelines_testdata_base_path + 'taxprofiler/samplesheet.csv'
-    databases = params.pipelines_testdata_base_path + 'taxprofiler/database_v1.1.csv'
-    perform_shortread_qc = true
-    perform_longread_qc = true
-    perform_shortread_complexityfilter = true
-    shortread_complexityfilter_tool = 'bbduk'
-    perform_shortread_hostremoval = true
-    perform_longread_hostremoval = true
-    perform_runmerging = true
-    hostremoval_reference = params.pipelines_testdata_base_path + 'modules/data/genomics/homo_sapiens/genome/genome.fasta'
-    run_kaiju = true
-    run_kraken2 = true
-    run_bracken = false
-    run_malt = false
-    run_metaphlan = false
-    run_centrifuge = false
-    run_diamond = false
-    run_krakenuniq = false
-    run_motus = false
-    run_ganon = false
-    run_kmcp = false
+    input = params.pipelines_testdata_base_path + 'taxprofiler/samplesheet.csv'
+    databases = params.pipelines_testdata_base_path + 'taxprofiler/database_v1.2.csv'
+    perform_shortread_qc = true
+    perform_longread_qc = true
+    perform_shortread_complexityfilter = true
+    shortread_complexityfilter_tool = 'bbduk'
+    perform_shortread_hostremoval = true
+    perform_longread_hostremoval = true
+    perform_runmerging = true
+    hostremoval_reference = params.pipelines_testdata_base_path + 'modules/data/genomics/homo_sapiens/genome/genome.fasta'
+    run_kaiju = true
+    run_kraken2 = true
+    run_bracken = false
+    run_malt = false
+    run_metaphlan = false
+    run_centrifuge = false
+    run_diamond = false
+    run_krakenuniq = false
+    run_motus = false
+    run_ganon = false
+    run_kmcp = false
 }

 process {

52 changes: 26 additions & 26 deletions conf/test_falco.config
@@ -11,36 +11,36 @@
 */

 params {
-    config_profile_name = 'Test profile for Falco'
-    config_profile_description = "Minimal test dataset without performing any preprocessing nor profiling to check pipeline function but running falco instead of fastqc. Useful when you only wish to test a single profiler without having to 'opt-out' of all the others"
+    config_profile_name = 'Test profile for Falco'
+    config_profile_description = "Minimal test dataset without performing any preprocessing nor profiling to check pipeline function but running falco instead of fastqc. Useful when you only wish to test a single profiler without having to 'opt-out' of all the others"

     // Limit resources so that this can run on GitHub Actions
-    max_cpus = 2
-    max_memory = '6.GB'
-    max_time = '6.h'
+    max_cpus = 2
+    max_memory = '6.GB'
+    max_time = '6.h'

     // Input data
-    input = params.pipelines_testdata_base_path + 'taxprofiler/samplesheet.csv'
-    databases = params.pipelines_testdata_base_path + 'taxprofiler/database_v1.1.csv'
-    preprocessing_qc_tool = 'falco'
-    perform_shortread_qc = true
-    perform_longread_qc = true
-    perform_shortread_complexityfilter = false
-    perform_shortread_hostremoval = false
-    perform_longread_hostremoval = false
-    perform_runmerging = false
-    hostremoval_reference = params.pipelines_testdata_base_path + 'modules/data/genomics/homo_sapiens/genome/genome.fasta'
-    run_kaiju = false
-    run_kraken2 = false
-    run_bracken = false
-    run_malt = false
-    run_metaphlan = false
-    run_centrifuge = false
-    run_diamond = false
-    run_krakenuniq = false
-    run_motus = false
-    run_ganon = false
-    run_kmcp = false
+    input = params.pipelines_testdata_base_path + 'taxprofiler/samplesheet.csv'
+    databases = params.pipelines_testdata_base_path + 'taxprofiler/database_v1.2.csv'
+    preprocessing_qc_tool = 'falco'
+    perform_shortread_qc = true
+    perform_longread_qc = true
+    perform_shortread_complexityfilter = false
+    perform_shortread_hostremoval = false
+    perform_longread_hostremoval = false
+    perform_runmerging = false
+    hostremoval_reference = params.pipelines_testdata_base_path + 'modules/data/genomics/homo_sapiens/genome/genome.fasta'
+    run_kaiju = false
+    run_kraken2 = false
+    run_bracken = false
+    run_malt = false
+    run_metaphlan = false
+    run_centrifuge = false
+    run_diamond = false
+    run_krakenuniq = false
+    run_motus = false
+    run_ganon = false
+    run_kmcp = false
 }

 process {

56 changes: 28 additions & 28 deletions conf/test_fastp.config
@@ -11,38 +11,38 @@
 */

 params {
-    config_profile_name = 'Test profile for fastp'
-    config_profile_description = "Minimal test to check the default short-read QC function, fastp"
+    config_profile_name = 'Test profile for fastp'
+    config_profile_description = "Minimal test to check the default short-read QC function, fastp"

     // Limit resources so that this can run on GitHub Actions
-    max_cpus = 2
-    max_memory = '6.GB'
-    max_time = '6.h'
+    max_cpus = 2
+    max_memory = '6.GB'
+    max_time = '6.h'

     // Input data
-    input = params.pipelines_testdata_base_path + 'taxprofiler/samplesheet.csv'
-    databases = params.pipelines_testdata_base_path + 'taxprofiler/database_v1.1.csv'
-    perform_shortread_qc = true
-    perform_longread_qc = true
-    shortread_qc_tool = 'fastp'
-    perform_shortread_redundancyestimation = true
-    perform_shortread_complexityfilter = true
-    shortread_complexityfilter_tool = 'fastp'
-    perform_shortread_hostremoval = true
-    perform_longread_hostremoval = true
-    perform_runmerging = true
-    hostremoval_reference = params.pipelines_testdata_base_path + 'modules/data/genomics/homo_sapiens/genome/genome.fasta'
-    run_kaiju = true
-    run_kraken2 = true
-    run_bracken = false
-    run_malt = false
-    run_metaphlan = false
-    run_centrifuge = false
-    run_diamond = false
-    run_krakenuniq = false
-    run_motus = false
-    run_ganon = false
-    run_kmcp = false
+    input = params.pipelines_testdata_base_path + 'taxprofiler/samplesheet.csv'
+    databases = params.pipelines_testdata_base_path + 'taxprofiler/database_v1.2.csv'
+    perform_shortread_qc = true
+    perform_longread_qc = true
+    shortread_qc_tool = 'fastp'
+    perform_shortread_redundancyestimation = true
+    perform_shortread_complexityfilter = true
+    shortread_complexityfilter_tool = 'fastp'
+    perform_shortread_hostremoval = true
+    perform_longread_hostremoval = true
+    perform_runmerging = true
+    hostremoval_reference = params.pipelines_testdata_base_path + 'modules/data/genomics/homo_sapiens/genome/genome.fasta'
+    run_kaiju = true
+    run_kraken2 = true
+    run_bracken = false
+    run_malt = false
+    run_metaphlan = false
+    run_centrifuge = false
+    run_diamond = false
+    run_krakenuniq = false
+    run_motus = false
+    run_ganon = false
+    run_kmcp = false
 }

 process {

10 changes: 5 additions & 5 deletions conf/test_full.config
@@ -9,15 +9,15 @@
 */

 params {
-    config_profile_name = 'Full test profile'
-    config_profile_description = 'Full test dataset to check pipeline function'
+    config_profile_name = 'Full test profile'
+    config_profile_description = 'Full test dataset to check pipeline function'

     // Input data for full size test
-    input = params.pipelines_testdata_base_path + 'taxprofiler/samplesheet_full.csv'
-    databases = params.pipelines_testdata_base_path + 'taxprofiler/database_full_v1.1.csv'
+    input = params.pipelines_testdata_base_path + 'taxprofiler/samplesheet_full.csv'
+    databases = params.pipelines_testdata_base_path + 'taxprofiler/database_full_v1.2.csv'

     // Genome references
-    hostremoval_reference = 'ftp://ftp.ncbi.nlm.nih.gov/genomes/all/GCA/000/819/615/GCA_000819615.1_ViralProj14015/GCA_000819615.1_ViralProj14015_genomic.fna.gz'
+    hostremoval_reference = 'ftp://ftp.ncbi.nlm.nih.gov/genomes/all/GCA/000/819/615/GCA_000819615.1_ViralProj14015/GCA_000819615.1_ViralProj14015_genomic.fna.gz'

     save_preprocessed_reads = false