From 4707da13255755e324e4264ab726ecc9b29a30c4 Mon Sep 17 00:00:00 2001 From: JoseEspinosa Date: Thu, 29 Feb 2024 11:41:33 +0100 Subject: [PATCH 01/20] First iteration towards lib removal --- main.nf | 175 ++++++++++++++++++++---- workflows/alphafold2.nf | 285 +++++++++++++++++++++++++--------------- workflows/colabfold.nf | 184 +++++++++++++++----------- 3 files changed, 432 insertions(+), 212 deletions(-) diff --git a/main.nf b/main.nf index 6e974b23..f848598a 100644 --- a/main.nf +++ b/main.nf @@ -11,14 +11,35 @@ nextflow.enable.dsl = 2 +/* +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + IMPORT FUNCTIONS / MODULES / SUBWORKFLOWS / WORKFLOWS +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +*/ + +if (params.mode == "alphafold2") { + include { PREPARE_ALPHAFOLD2_DBS } from './subworkflows/local/prepare_alphafold2_dbs' + include { ALPHAFOLD2 } from './workflows/alphafold2' +} else if (params.mode == "colabfold") { + include { PREPARE_COLABFOLD_DBS } from './subworkflows/local/prepare_colabfold_dbs' + include { COLABFOLD } from './workflows/colabfold' +} else if (params.mode == "esmfold") { + include { ESMFOLD } from './workflows/esmfold' +} + +include { PIPELINE_INITIALISATION } from './subworkflows/local/utils_nfcore_proteinfold_pipeline' +include { PIPELINE_COMPLETION } from './subworkflows/local/utils_nfcore_proteinfold_pipeline' +include { getColabfoldAlphafold2Params } from './subworkflows/local/utils_nfcore_proteinfold_pipeline' +include { getColabfoldAlphafold2ParamsPath } from './subworkflows/local/utils_nfcore_proteinfold_pipeline' + /* ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ COLABFOLD PARAMETER VALUES ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ */ -params.colabfold_alphafold2_params = WorkflowMain.getColabfoldAlphafold2Params(params) -params.colabfold_alphafold2_params_path = WorkflowMain.getColabfoldAlphafold2ParamsPath(params) +params.colabfold_alphafold2_params = getColabfoldAlphafold2Params() +params.colabfold_alphafold2_params_path = getColabfoldAlphafold2ParamsPath() /* ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ @@ -26,23 +47,23 @@ params.colabfold_alphafold2_params_path = WorkflowMain.getColabfoldAlphafold2Par ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ */ -include { validateParameters; paramsHelp } from 'plugin/nf-validation' +// include { validateParameters; paramsHelp } from 'plugin/nf-validation' -// Print help message if needed -if (params.help) { - def logo = NfcoreTemplate.logo(workflow, params.monochrome_logs) - def citation = '\n' + WorkflowMain.citation(workflow) + '\n' - def String command = "nextflow run ${workflow.manifest.name} --input samplesheet.csv --genome GRCh37 -profile docker" - log.info logo + paramsHelp(command) + citation + NfcoreTemplate.dashedLine(params.monochrome_logs) - System.exit(0) -} +// // Print help message if needed +// if (params.help) { +// def logo = NfcoreTemplate.logo(workflow, params.monochrome_logs) +// def citation = '\n' + WorkflowMain.citation(workflow) + '\n' +// def String command = "nextflow run ${workflow.manifest.name} --input samplesheet.csv --genome GRCh37 -profile docker" +// log.info logo + paramsHelp(command) + citation + NfcoreTemplate.dashedLine(params.monochrome_logs) +// System.exit(0) +// } -// Validate input parameters -if (params.validate_params) 
{ - validateParameters() -} +// // Validate input parameters +// if (params.validate_params) { +// validateParameters() +// } -WorkflowMain.initialise(workflow, params, log, args) +// WorkflowMain.initialise(workflow, params, log, args) /* ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ @@ -50,35 +71,105 @@ WorkflowMain.initialise(workflow, params, log, args) ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ */ -if (params.mode == "alphafold2") { - include { ALPHAFOLD2 } from './workflows/alphafold2' -} else if (params.mode == "colabfold") { - include { COLABFOLD } from './workflows/colabfold' -} else if (params.mode == "esmfold") { - include { ESMFOLD } from './workflows/esmfold' -} - +// +// WORKFLOW: Run main analysis pipeline +// workflow NFCORE_PROTEINFOLD { + + main: + ch_multiqc = Channel.empty() + ch_versions = Channel.empty() + // // WORKFLOW: Run alphafold2 // if(params.mode == "alphafold2") { - ALPHAFOLD2 () + // + // SUBWORKFLOW: Prepare Alphafold2 DBs + // + PREPARE_ALPHAFOLD2_DBS ( ) + ch_versions = ch_versions.mix(PREPARE_ALPHAFOLD2_DBS.out.versions) + + // + // WORKFLOW: Run nf-core/alphafold2 workflow + // + ALPHAFOLD2 ( + ch_versions, + params.full_dbs, + params.alphafold2_mode, + params.alphafold2_model_preset, + PREPARE_ALPHAFOLD2_DBS.out.params, + PREPARE_ALPHAFOLD2_DBS.out.bfd.ifEmpty([]), + PREPARE_ALPHAFOLD2_DBS.out.small_bfd.ifEmpty([]), + PREPARE_ALPHAFOLD2_DBS.out.mgnify, + PREPARE_ALPHAFOLD2_DBS.out.pdb70, + PREPARE_ALPHAFOLD2_DBS.out.pdb_mmcif, + PREPARE_ALPHAFOLD2_DBS.out.uniref30, + PREPARE_ALPHAFOLD2_DBS.out.uniref90, + PREPARE_ALPHAFOLD2_DBS.out.pdb_seqres, + PREPARE_ALPHAFOLD2_DBS.out.uniprot + ) + ch_multiqc = ALPHAFOLD2.out.multiqc_report + ch_versions = ch_versions.mix(ALPHAFOLD2.out.versions) } // // WORKFLOW: Run colabfold // else if(params.mode == "colabfold") { - COLABFOLD () + // + // SUBWORKFLOW: Prepare Colabfold DBs + // + PREPARE_COLABFOLD_DBS ( + params.colabfold_alphafold2_params_path, + params.colabfold_db_path, + params.uniref30_colabfold_path, + params.colabfold_alphafold2_params, + params.colabfold_db_link, + params.uniref30_colabfold_link, + params.create_colabfold_index + ) + ch_versions = ch_versions.mix(PREPARE_COLABFOLD_DBS.out.versions) + + // + // WORKFLOW: Run nf-core/colabfold workflow + // + COLABFOLD ( + ch_versions, + params.colabfold_model_preset, + PREPARE_COLABFOLD_DBS.out.params, + PREPARE_COLABFOLD_DBS.out.colabfold_db, + PREPARE_COLABFOLD_DBS.out.uniref30, + params.num_recycle + ) + ch_multiqc = COLABFOLD.out.multiqc_report + ch_versions = ch_versions.mix(COLABFOLD.out.versions) } // // WORKFLOW: Run esmfold // else if(params.mode == "esmfold") { - ESMFOLD () + // + // SUBWORKFLOW: Prepare esmfold DBs + // + PREPARE_ESMFOLD_DBS () + ch_versions = ch_versions.mix(PREPARE_ESMFOLD_DBS.out.versions) + + // + // WORKFLOW: Run esmfold + // + ESMFOLD ( + ch_versions, + PREPARE_ESMFOLD_DBS.out.params, + params.num_recycle + ) + ch_multiqc = ESMFOLD.out.multiqc_report + ch_versions = ch_versions.mix(ESMFOLD.out.versions) } + emit: + multiqc_report = ch_multiqc // channel: /path/to/multiqc_report.html + versions = ch_versions // channel: [version1, version2, ...] 
} /* @@ -92,7 +183,37 @@ workflow NFCORE_PROTEINFOLD { // See: https://github.com/nf-core/rnaseq/issues/619 // workflow { + + main: + // + // SUBWORKFLOW: Run initialisation tasks + // + PIPELINE_INITIALISATION ( + params.version, + params.help, + params.validate_params, + params.monochrome_logs, + args, + params.outdir + ) + + // + // WORKFLOW: Run main workflow + // NFCORE_PROTEINFOLD () + + // + // SUBWORKFLOW: Run completion tasks + // + PIPELINE_COMPLETION ( + params.email, + params.email_on_fail, + params.plaintext_email, + params.outdir, + params.monochrome_logs, + params.hook_url, + NFCORE_PROTEINFOLD.out.multiqc_report + ) } /* diff --git a/workflows/alphafold2.nf b/workflows/alphafold2.nf index 45f7405d..9de129dc 100644 --- a/workflows/alphafold2.nf +++ b/workflows/alphafold2.nf @@ -1,49 +1,55 @@ -/* +/* //TODO: change header ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ VALIDATE INPUTS ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ */ -include { paramsSummaryLog; paramsSummaryMap; fromSamplesheet } from 'plugin/nf-validation' +include { paramsSummaryMap } from 'plugin/nf-validation' +include { fromSamplesheet } from 'plugin/nf-validation' +include { paramsSummaryMultiqc } from '../subworkflows/nf-core/utils_nfcore_pipeline' +include { softwareVersionsToYAML } from '../subworkflows/nf-core/utils_nfcore_pipeline' +include { methodsDescriptionText } from '../subworkflows/local/utils_nfcore_proteinfold_pipeline' -def logo = NfcoreTemplate.logo(workflow, params.monochrome_logs) -def citation = '\n' + WorkflowMain.citation(workflow) + '\n' -def summary_params = paramsSummaryMap(workflow) +// // TODO: remove Should be now in the common initialize +// // def logo = NfcoreTemplate.logo(workflow, params.monochrome_logs) +// // def citation = '\n' + WorkflowMain.citation(workflow) + '\n' +// // def summary_params = paramsSummaryMap(workflow) -// Print parameter summary log to screen -log.info logo + paramsSummaryLog(workflow) + citation +// // // Print parameter summary log to screen +// // log.info logo + paramsSummaryLog(workflow) + citation -// Validate input parameters -WorkflowAlphafold2.initialise(params, log) +// // // Validate input parameters +// // WorkflowAlphafold2.initialise(params, log) -/* -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - CONFIG FILES -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -*/ +// // TODO: remove +// /* +// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +// CONFIG FILES +// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +// */ -ch_multiqc_config = Channel.fromPath("$projectDir/assets/multiqc_config.yml", checkIfExists: true) -ch_multiqc_custom_config = params.multiqc_config ? Channel.fromPath( params.multiqc_config ) : Channel.empty() -ch_multiqc_logo = params.multiqc_logo ? Channel.fromPath( params.multiqc_logo ) : Channel.empty() -ch_multiqc_custom_methods_description = params.multiqc_methods_description ? file(params.multiqc_methods_description) : file("$projectDir/assets/methods_description_template.yml", checkIfExists: true) +// // ch_multiqc_config = Channel.fromPath("$projectDir/assets/multiqc_config.yml", checkIfExists: true) +// // ch_multiqc_custom_config = params.multiqc_config ? Channel.fromPath( params.multiqc_config ) : Channel.empty() +// // ch_multiqc_logo = params.multiqc_logo ? 
Channel.fromPath( params.multiqc_logo ) : Channel.empty() +// // ch_multiqc_custom_methods_description = params.multiqc_methods_description ? file(params.multiqc_methods_description) : file("$projectDir/assets/methods_description_template.yml", checkIfExists: true) -/* -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - IMPORT LOCAL MODULES/SUBWORKFLOWS -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -*/ +// /* +// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +// IMPORT LOCAL MODULES/SUBWORKFLOWS +// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +// */ -// -// SUBWORKFLOW: Consisting of a mix of local and nf-core/modules -// -include { PREPARE_ALPHAFOLD2_DBS } from '../subworkflows/local/prepare_alphafold2_dbs' +// // // TODO: remove +// // // SUBWORKFLOW: Consisting of a mix of local and nf-core/modules +// // // +// // include { PREPARE_ALPHAFOLD2_DBS } from '../subworkflows/local/prepare_alphafold2_dbs' // // MODULE: Local to the pipeline // -include { RUN_ALPHAFOLD2 } from '../modules/local/run_alphafold2' -include { RUN_ALPHAFOLD2_MSA } from '../modules/local/run_alphafold2_msa' -include { RUN_ALPHAFOLD2_PRED } from '../modules/local/run_alphafold2_pred' +include { RUN_ALPHAFOLD2 } from '../modules/local/run_alphafold2' +include { RUN_ALPHAFOLD2_MSA } from '../modules/local/run_alphafold2_msa' +include { RUN_ALPHAFOLD2_PRED } from '../modules/local/run_alphafold2_pred' /* ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ @@ -54,8 +60,9 @@ include { RUN_ALPHAFOLD2_PRED } from '../modules/local/run_alphafold2_pred' // // MODULE: Installed directly from nf-core/modules // -include { MULTIQC } from '../modules/nf-core/multiqc/main' -include { CUSTOM_DUMPSOFTWAREVERSIONS } from '../modules/nf-core/custom/dumpsoftwareversions/main' +include { MULTIQC } from '../modules/nf-core/multiqc/main' +// TODO: remove +// include { CUSTOM_DUMPSOFTWAREVERSIONS } from '../modules/nf-core/custom/dumpsoftwareversions/main' /* ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ @@ -63,13 +70,58 @@ include { CUSTOM_DUMPSOFTWAREVERSIONS } from '../modules/nf-core/custom/dumpsoft ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ */ -// Info required for completion email and summary -def multiqc_report = [] +// TODO: remove +// // Info required for completion email and summary +// def multiqc_report = [] +// workflow ALPHAFOLD2 { + +// take: +// ch_versions +// ch_full_dbs +// ch_alphafold2_mode +// // ch_alphafold2_model_preset, +// // ch_params, +// // ch_bfd, +// // ch_small_bfd, +// // ch_mgnify, +// // ch_pdb70, +// // ch_pdb_mmcif, +// // ch_uniref30, +// // ch_uniref90, +// // ch_pdb_seqres, +// // ch_uniprot + +// main: +// println("culo.........") +// ch_multiqc_files = Channel.empty() +// ch_versions = Channel.empty() + +// emit: +// multiqc_report = ch_multiqc_files // channel: /path/to/multiqc_report.html +// versions = ch_versions +// } workflow ALPHAFOLD2 { - ch_versions = Channel.empty() + take: + ch_versions + ch_full_dbs + ch_alphafold2_mode + ch_alphafold2_model_preset + ch_alphafold2_params + ch_bfd + ch_small_bfd + ch_mgnify + ch_pdb70 + ch_pdb_mmcif + ch_uniref30 + ch_uniref90 + ch_pdb_seqres + ch_uniprot + main: + ch_multiqc_files = Channel.empty() + // // Create input channel from input file provided through 
params.input // @@ -77,7 +129,7 @@ workflow ALPHAFOLD2 { .fromSamplesheet("input") .set { ch_fasta } - if (params.alphafold2_model_preset != 'multimer') { + if (ch_alphafold2_model_preset != 'multimer') { ch_fasta .map { meta, fasta -> @@ -90,93 +142,103 @@ workflow ALPHAFOLD2 { // // SUBWORKFLOW: Download databases and params for Alphafold2 // - PREPARE_ALPHAFOLD2_DBS ( ) - ch_versions = ch_versions.mix(PREPARE_ALPHAFOLD2_DBS.out.versions) - - if (params.alphafold2_mode == 'standard') { + // PREPARE_ALPHAFOLD2_DBS ( ) //TODO: remove + // ch_versions = ch_versions.mix(PREPARE_ALPHAFOLD2_DBS.out.versions) + if (ch_alphafold2_mode == 'standard') { // // SUBWORKFLOW: Run Alphafold2 standard mode // RUN_ALPHAFOLD2 ( ch_fasta, - params.full_dbs, - params.alphafold2_model_preset, - PREPARE_ALPHAFOLD2_DBS.out.params, - PREPARE_ALPHAFOLD2_DBS.out.bfd.ifEmpty([]), - PREPARE_ALPHAFOLD2_DBS.out.small_bfd.ifEmpty([]), - PREPARE_ALPHAFOLD2_DBS.out.mgnify, - PREPARE_ALPHAFOLD2_DBS.out.pdb70, - PREPARE_ALPHAFOLD2_DBS.out.pdb_mmcif, - PREPARE_ALPHAFOLD2_DBS.out.uniref30, - PREPARE_ALPHAFOLD2_DBS.out.uniref90, - PREPARE_ALPHAFOLD2_DBS.out.pdb_seqres, - PREPARE_ALPHAFOLD2_DBS.out.uniprot, + ch_full_dbs, + ch_alphafold2_model_preset, + ch_alphafold2_params, + ch_bfd, + ch_small_bfd, + ch_mgnify, + ch_pdb70, + ch_pdb_mmcif, + ch_uniref30, + ch_uniref90, + ch_pdb_seqres, + ch_uniprot ) - ch_versions = ch_versions.mix(RUN_ALPHAFOLD2.out.versions) ch_multiqc_rep = RUN_ALPHAFOLD2.out.multiqc.collect() - } else if (params.alphafold2_mode == 'split_msa_prediction') { + ch_versions = ch_versions.mix(RUN_ALPHAFOLD2.out.versions) + + } else if (ch_alphafold2_mode == 'split_msa_prediction') { // // SUBWORKFLOW: Run Alphafold2 split mode, MSA and predicition // RUN_ALPHAFOLD2_MSA ( ch_fasta, - params.full_dbs, - params.alphafold2_model_preset, - PREPARE_ALPHAFOLD2_DBS.out.params, - PREPARE_ALPHAFOLD2_DBS.out.bfd.ifEmpty([]), - PREPARE_ALPHAFOLD2_DBS.out.small_bfd.ifEmpty([]), - PREPARE_ALPHAFOLD2_DBS.out.mgnify, - PREPARE_ALPHAFOLD2_DBS.out.pdb70, - PREPARE_ALPHAFOLD2_DBS.out.pdb_mmcif, - PREPARE_ALPHAFOLD2_DBS.out.uniref30, - PREPARE_ALPHAFOLD2_DBS.out.uniref90, - PREPARE_ALPHAFOLD2_DBS.out.pdb_seqres, - PREPARE_ALPHAFOLD2_DBS.out.uniprot - + ch_full_dbs, + ch_alphafold2_model_preset, + ch_alphafold2_params, + ch_bfd, + ch_small_bfd, + ch_mgnify, + ch_pdb70, + ch_pdb_mmcif, + ch_uniref30, + ch_uniref90, + ch_pdb_seqres, + ch_uniprot ) - ch_versions = ch_versions.mix(RUN_ALPHAFOLD2_MSA.out.versions) - + ch_multiqc_rep = RUN_ALPHAFOLD2_MSA.out.multiqc.collect() + ch_versions = ch_versions.mix(RUN_ALPHAFOLD2_MSA.out.versions) + RUN_ALPHAFOLD2_PRED ( ch_fasta, - params.full_dbs, - params.alphafold2_model_preset, - PREPARE_ALPHAFOLD2_DBS.out.params, - PREPARE_ALPHAFOLD2_DBS.out.bfd.ifEmpty([]), - PREPARE_ALPHAFOLD2_DBS.out.small_bfd.ifEmpty([]), - PREPARE_ALPHAFOLD2_DBS.out.mgnify, - PREPARE_ALPHAFOLD2_DBS.out.pdb70, - PREPARE_ALPHAFOLD2_DBS.out.pdb_mmcif, - PREPARE_ALPHAFOLD2_DBS.out.uniref30, - PREPARE_ALPHAFOLD2_DBS.out.uniref90, - PREPARE_ALPHAFOLD2_DBS.out.pdb_seqres, - PREPARE_ALPHAFOLD2_DBS.out.uniprot, + ch_full_dbs, + ch_alphafold2_model_preset, + ch_alphafold2_params, + ch_bfd, + ch_small_bfd, + ch_mgnify, + ch_pdb70, + ch_pdb_mmcif, + ch_uniref30, + ch_uniref90, + ch_pdb_seqres, + ch_uniprot, RUN_ALPHAFOLD2_MSA.out.features - ) - ch_versions = ch_versions.mix(RUN_ALPHAFOLD2_PRED.out.versions) ch_multiqc_rep = RUN_ALPHAFOLD2_PRED.out.multiqc.collect() + ch_versions = 
ch_versions.mix(RUN_ALPHAFOLD2_PRED.out.versions) } + // TODO: remove + // // + // // MODULE: Pipeline reporting + // // + // CUSTOM_DUMPSOFTWAREVERSIONS ( + // ch_versions.unique().collectFile(name: 'collated_versions.yml') + // ) + // - // MODULE: Pipeline reporting + // Collate and save software versions // - CUSTOM_DUMPSOFTWAREVERSIONS ( - ch_versions.unique().collectFile(name: 'collated_versions.yml') - ) + softwareVersionsToYAML(ch_versions) + .collectFile(storeDir: "${params.outdir}/pipeline_info", name: 'nf_core_proteinfold_software_mqc_versions.yml', sort: true, newLine: true) + .set { ch_collated_versions } // // MODULE: MultiQC // - workflow_summary = WorkflowAlphafold2.paramsSummaryMultiqc(workflow, summary_params) - ch_workflow_summary = Channel.value(workflow_summary) - - methods_description = WorkflowAlphafold2.methodsDescriptionText(workflow, ch_multiqc_custom_methods_description, params) - ch_methods_description = Channel.value(methods_description) + ch_multiqc_report = Channel.empty() + ch_multiqc_config = Channel.fromPath("$projectDir/assets/multiqc_config.yml", checkIfExists: true) + ch_multiqc_custom_config = params.multiqc_config ? Channel.fromPath( params.multiqc_config ) : Channel.empty() + ch_multiqc_logo = params.multiqc_logo ? Channel.fromPath( params.multiqc_logo ) : Channel.empty() + summary_params = paramsSummaryMap(workflow, parameters_schema: "nextflow_schema.json") + ch_workflow_summary = Channel.value(paramsSummaryMultiqc(summary_params)) + ch_multiqc_custom_methods_description = params.multiqc_methods_description ? file(params.multiqc_methods_description, checkIfExists: true) : file("$projectDir/assets/methods_description_template.yml", checkIfExists: true) + ch_methods_description = Channel.value(methodsDescriptionText(ch_multiqc_custom_methods_description)) ch_multiqc_files = Channel.empty() ch_multiqc_files = ch_multiqc_files.mix(ch_workflow_summary.collectFile(name: 'workflow_summary_mqc.yaml')) ch_multiqc_files = ch_multiqc_files.mix(ch_methods_description.collectFile(name: 'methods_description_mqc.yaml')) - ch_multiqc_files = ch_multiqc_files.mix(CUSTOM_DUMPSOFTWAREVERSIONS.out.mqc_yml.collect()) + ch_multiqc_files = ch_multiqc_files.mix(ch_collated_versions) ch_multiqc_files = ch_multiqc_files.mix(ch_multiqc_rep) MULTIQC ( @@ -185,25 +247,30 @@ workflow ALPHAFOLD2 { ch_multiqc_custom_config.toList(), ch_multiqc_logo.toList() ) - multiqc_report = MULTIQC.out.report.toList() + ch_multiqc_report = MULTIQC.out.report.toList() + + emit: + multiqc_report = ch_multiqc_report // channel: /path/to/multiqc_report.html + versions = ch_versions // channel: [ path(versions.yml) ] } -/* -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - COMPLETION EMAIL AND SUMMARY -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -*/ +// TODO: remove +// /* +// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +// COMPLETION EMAIL AND SUMMARY +// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +// */ -workflow.onComplete { - if (params.email || params.email_on_fail) { - NfcoreTemplate.email(workflow, params, summary_params, projectDir, log, multiqc_report) - } - NfcoreTemplate.dump_parameters(workflow, params) - NfcoreTemplate.summary(workflow, params, log) - if (params.hook_url) { - NfcoreTemplate.adaptivecard(workflow, params, summary_params, projectDir, log) - } -} +// workflow.onComplete { +// if (params.email || 
params.email_on_fail) { +// NfcoreTemplate.email(workflow, params, summary_params, projectDir, log, multiqc_report) +// } +// NfcoreTemplate.dump_parameters(workflow, params) +// NfcoreTemplate.summary(workflow, params, log) +// if (params.hook_url) { +// NfcoreTemplate.adaptivecard(workflow, params, summary_params, projectDir, log) +// } +// } /* ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ diff --git a/workflows/colabfold.nf b/workflows/colabfold.nf index f07495eb..6688d56c 100644 --- a/workflows/colabfold.nf +++ b/workflows/colabfold.nf @@ -1,31 +1,35 @@ -/* +/* //TODO: change header ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ VALIDATE INPUTS ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ */ -include { paramsSummaryLog; paramsSummaryMap; fromSamplesheet } from 'plugin/nf-validation' +include { paramsSummaryMap } from 'plugin/nf-validation' +include { fromSamplesheet } from 'plugin/nf-validation' +include { paramsSummaryMultiqc } from '../subworkflows/nf-core/utils_nfcore_pipeline' +include { softwareVersionsToYAML } from '../subworkflows/nf-core/utils_nfcore_pipeline' +include { methodsDescriptionText } from '../subworkflows/local/utils_nfcore_proteinfold_pipeline' -def logo = NfcoreTemplate.logo(workflow, params.monochrome_logs) -def citation = '\n' + WorkflowMain.citation(workflow) + '\n' -def summary_params = paramsSummaryMap(workflow) +// def logo = NfcoreTemplate.logo(workflow, params.monochrome_logs) +// def citation = '\n' + WorkflowMain.citation(workflow) + '\n' +// def summary_params = paramsSummaryMap(workflow) -// Print parameter summary log to screen -log.info logo + paramsSummaryLog(workflow) + citation +// // Print parameter summary log to screen +// log.info logo + paramsSummaryLog(workflow) + citation -// Validate input parameters -WorkflowColabfold.initialise(params, log) +// // Validate input parameters +// WorkflowColabfold.initialise(params, log) -/* -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - CONFIG FILES -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -*/ +// /* +// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +// CONFIG FILES +// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +// */ -ch_multiqc_config = Channel.fromPath("$projectDir/assets/multiqc_config.yml", checkIfExists: true) -ch_multiqc_custom_config = params.multiqc_config ? Channel.fromPath( params.multiqc_config ) : Channel.empty() -ch_multiqc_logo = params.multiqc_logo ? Channel.fromPath( params.multiqc_logo ) : Channel.empty() -ch_multiqc_custom_methods_description = params.multiqc_methods_description ? file(params.multiqc_methods_description) : file("$projectDir/assets/methods_description_template.yml", checkIfExists: true) +// ch_multiqc_config = Channel.fromPath("$projectDir/assets/multiqc_config.yml", checkIfExists: true) +// ch_multiqc_custom_config = params.multiqc_config ? Channel.fromPath( params.multiqc_config ) : Channel.empty() +// ch_multiqc_logo = params.multiqc_logo ? Channel.fromPath( params.multiqc_logo ) : Channel.empty() +// ch_multiqc_custom_methods_description = params.multiqc_methods_description ? 
file(params.multiqc_methods_description) : file("$projectDir/assets/methods_description_template.yml", checkIfExists: true) /* ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ @@ -33,10 +37,10 @@ ch_multiqc_custom_methods_description = params.multiqc_methods_description ? fil ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ */ -// -// SUBWORKFLOW: Consisting of a mix of local and nf-core/modules -// -include { PREPARE_COLABFOLD_DBS } from '../subworkflows/local/prepare_colabfold_dbs' +// // +// // SUBWORKFLOW: Consisting of a mix of local and nf-core/modules +// // +// include { PREPARE_COLABFOLD_DBS } from '../subworkflows/local/prepare_colabfold_dbs' // // MODULE: Local to the pipeline @@ -55,7 +59,7 @@ include { MULTIFASTA_TO_CSV } from '../modules/local/multifasta_to_csv' // MODULE: Installed directly from nf-core/modules // include { MULTIQC } from '../modules/nf-core/multiqc/main' -include { CUSTOM_DUMPSOFTWAREVERSIONS } from '../modules/nf-core/custom/dumpsoftwareversions/main' +// include { CUSTOM_DUMPSOFTWAREVERSIONS } from '../modules/nf-core/custom/dumpsoftwareversions/main' /* ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ @@ -64,11 +68,20 @@ include { CUSTOM_DUMPSOFTWAREVERSIONS } from '../modules/nf-core/custom/dumpsoft */ // Info required for completion email and summary -def multiqc_report = [] +// def multiqc_report = [] workflow COLABFOLD { - - ch_versions = Channel.empty() + + take: + ch_versions + ch_colabfold_model_preset + ch_colabfold_params + ch_colabfold_db + ch_uniref30 + ch_num_recycle + + main: + ch_multiqc_files = Channel.empty() // // Create input channel from input file provided through params.input @@ -77,8 +90,8 @@ workflow COLABFOLD { .fromSamplesheet("input") .set { ch_fasta } - PREPARE_COLABFOLD_DBS ( ) - ch_versions = ch_versions.mix(PREPARE_COLABFOLD_DBS.out.versions) + // PREPARE_COLABFOLD_DBS ( ) + // ch_versions = ch_versions.mix(PREPARE_COLABFOLD_DBS.out.versions) if (params.colabfold_server == 'webserver') { // @@ -91,21 +104,21 @@ workflow COLABFOLD { ch_versions = ch_versions.mix(MULTIFASTA_TO_CSV.out.versions) COLABFOLD_BATCH( MULTIFASTA_TO_CSV.out.input_csv, - params.colabfold_model_preset, - PREPARE_COLABFOLD_DBS.out.params, - [], - [], - params.num_recycle + ch_colabfold_model_preset, + ch_colabfold_params, + ch_colabfold_db, + ch_uniref30, + ch_num_recycle ) ch_versions = ch_versions.mix(COLABFOLD_BATCH.out.versions) } else { COLABFOLD_BATCH( ch_fasta, - params.colabfold_model_preset, - PREPARE_COLABFOLD_DBS.out.params, - [], - [], - params.num_recycle + ch_colabfold_model_preset, + ch_colabfold_params, + ch_colabfold_db, + ch_uniref30, + ch_num_recycle ) ch_versions = ch_versions.mix(COLABFOLD_BATCH.out.versions) } @@ -121,17 +134,17 @@ workflow COLABFOLD { ch_versions = ch_versions.mix(MULTIFASTA_TO_CSV.out.versions) MMSEQS_COLABFOLDSEARCH ( MULTIFASTA_TO_CSV.out.input_csv, - PREPARE_COLABFOLD_DBS.out.params, - PREPARE_COLABFOLD_DBS.out.colabfold_db, - PREPARE_COLABFOLD_DBS.out.uniref30, + ch_colabfold_params, + ch_colabfold_db, + ch_uniref30 ) ch_versions = ch_versions.mix(MMSEQS_COLABFOLDSEARCH.out.versions) } else { MMSEQS_COLABFOLDSEARCH ( ch_fasta, - PREPARE_COLABFOLD_DBS.out.params, - PREPARE_COLABFOLD_DBS.out.colabfold_db, - PREPARE_COLABFOLD_DBS.out.uniref30, + ch_colabfold_params, + ch_colabfold_db, + ch_uniref30 ) ch_versions = ch_versions.mix(MMSEQS_COLABFOLDSEARCH.out.versions) } @@ -141,35 +154,50 @@ 
workflow COLABFOLD { // COLABFOLD_BATCH( MMSEQS_COLABFOLDSEARCH.out.a3m, - params.colabfold_model_preset, - PREPARE_COLABFOLD_DBS.out.params, - PREPARE_COLABFOLD_DBS.out.colabfold_db, - PREPARE_COLABFOLD_DBS.out.uniref30, - params.num_recycle + ch_colabfold_model_preset, + ch_colabfold_params, + ch_colabfold_db, + ch_uniref30, + ch_num_recycle ) ch_versions = ch_versions.mix(COLABFOLD_BATCH.out.versions) } // - // MODULE: Pipeline reporting + // Collate and save software versions // - CUSTOM_DUMPSOFTWAREVERSIONS ( - ch_versions.unique().collectFile(name: 'collated_versions.yml') - ) + softwareVersionsToYAML(ch_versions) + .collectFile(storeDir: "${params.outdir}/pipeline_info", name: 'nf_core_proteinfold_software_mqc_versions.yml', sort: true, newLine: true) + .set { ch_collated_versions } + + // // + // // MODULE: Pipeline reporting + // // + // CUSTOM_DUMPSOFTWAREVERSIONS ( + // ch_versions.unique().collectFile(name: 'collated_versions.yml') + // ) // // MODULE: MultiQC // - workflow_summary = WorkflowColabfold.paramsSummaryMultiqc(workflow, summary_params) - ch_workflow_summary = Channel.value(workflow_summary) - - methods_description = WorkflowColabfold.methodsDescriptionText(workflow, ch_multiqc_custom_methods_description, params) - ch_methods_description = Channel.value(methods_description) + // workflow_summary = WorkflowColabfold.paramsSummaryMultiqc(workflow, summary_params) + // ch_workflow_summary = Channel.value(workflow_summary) + + // methods_description = WorkflowColabfold.methodsDescriptionText(workflow, ch_multiqc_custom_methods_description, params) + // ch_methods_description = Channel.value(methods_description) + ch_multiqc_report = Channel.empty() + ch_multiqc_config = Channel.fromPath("$projectDir/assets/multiqc_config.yml", checkIfExists: true) + ch_multiqc_custom_config = params.multiqc_config ? Channel.fromPath( params.multiqc_config ) : Channel.empty() + ch_multiqc_logo = params.multiqc_logo ? Channel.fromPath( params.multiqc_logo ) : Channel.empty() + summary_params = paramsSummaryMap(workflow, parameters_schema: "nextflow_schema.json") + ch_workflow_summary = Channel.value(paramsSummaryMultiqc(summary_params)) + ch_multiqc_custom_methods_description = params.multiqc_methods_description ? 
file(params.multiqc_methods_description, checkIfExists: true) : file("$projectDir/assets/methods_description_template.yml", checkIfExists: true) + ch_methods_description = Channel.value(methodsDescriptionText(ch_multiqc_custom_methods_description)) ch_multiqc_files = Channel.empty() ch_multiqc_files = ch_multiqc_files.mix(ch_workflow_summary.collectFile(name: 'workflow_summary_mqc.yaml')) ch_multiqc_files = ch_multiqc_files.mix(ch_methods_description.collectFile(name: 'methods_description_mqc.yaml')) - ch_multiqc_files = ch_multiqc_files.mix(CUSTOM_DUMPSOFTWAREVERSIONS.out.mqc_yml.collect()) + ch_multiqc_files = ch_multiqc_files.mix(ch_collated_versions) ch_multiqc_files = ch_multiqc_files.mix(COLABFOLD_BATCH.out.multiqc.collect()) MULTIQC ( @@ -178,25 +206,29 @@ workflow COLABFOLD { ch_multiqc_custom_config.toList(), ch_multiqc_logo.toList() ) - multiqc_report = MULTIQC.out.report.toList() + ch_multiqc_report = MULTIQC.out.report.toList() + + emit: + multiqc_report = ch_multiqc_report // channel: /path/to/multiqc_report.html + versions = ch_versions // channel: [ path(versions.yml) ] } -/* -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - COMPLETION EMAIL AND SUMMARY -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -*/ - -workflow.onComplete { - if (params.email || params.email_on_fail) { - NfcoreTemplate.email(workflow, params, summary_params, projectDir, log, multiqc_report) - } - NfcoreTemplate.dump_parameters(workflow, params) - NfcoreTemplate.summary(workflow, params, log) - if (params.hook_url) { - NfcoreTemplate.IM_notification(workflow, params, summary_params, projectDir, log) - } -} +// /* +// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +// COMPLETION EMAIL AND SUMMARY +// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +// */ + +// workflow.onComplete { +// if (params.email || params.email_on_fail) { +// NfcoreTemplate.email(workflow, params, summary_params, projectDir, log, multiqc_report) +// } +// NfcoreTemplate.dump_parameters(workflow, params) +// NfcoreTemplate.summary(workflow, params, log) +// if (params.hook_url) { +// NfcoreTemplate.IM_notification(workflow, params, summary_params, projectDir, log) +// } +// } /* ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ From ae8d7359f50955f23d161283329a1fa52ba98b80 Mon Sep 17 00:00:00 2001 From: JoseEspinosa Date: Thu, 29 Feb 2024 12:53:32 +0100 Subject: [PATCH 02/20] Clean alphafold2 workflow --- workflows/alphafold2.nf | 173 +++++++++------------------------------- 1 file changed, 39 insertions(+), 134 deletions(-) diff --git a/workflows/alphafold2.nf b/workflows/alphafold2.nf index 9de129dc..f2c95211 100644 --- a/workflows/alphafold2.nf +++ b/workflows/alphafold2.nf @@ -1,55 +1,15 @@ -/* //TODO: change header +/* ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - VALIDATE INPUTS + IMPORT LOCAL MODULES/SUBWORKFLOWS ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ */ -include { paramsSummaryMap } from 'plugin/nf-validation' -include { fromSamplesheet } from 'plugin/nf-validation' -include { paramsSummaryMultiqc } from '../subworkflows/nf-core/utils_nfcore_pipeline' -include { softwareVersionsToYAML } from '../subworkflows/nf-core/utils_nfcore_pipeline' -include { methodsDescriptionText } from 
'../subworkflows/local/utils_nfcore_proteinfold_pipeline' - -// // TODO: remove Should be now in the common initialize -// // def logo = NfcoreTemplate.logo(workflow, params.monochrome_logs) -// // def citation = '\n' + WorkflowMain.citation(workflow) + '\n' -// // def summary_params = paramsSummaryMap(workflow) - -// // // Print parameter summary log to screen -// // log.info logo + paramsSummaryLog(workflow) + citation - -// // // Validate input parameters -// // WorkflowAlphafold2.initialise(params, log) - -// // TODO: remove -// /* -// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -// CONFIG FILES -// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -// */ - -// // ch_multiqc_config = Channel.fromPath("$projectDir/assets/multiqc_config.yml", checkIfExists: true) -// // ch_multiqc_custom_config = params.multiqc_config ? Channel.fromPath( params.multiqc_config ) : Channel.empty() -// // ch_multiqc_logo = params.multiqc_logo ? Channel.fromPath( params.multiqc_logo ) : Channel.empty() -// // ch_multiqc_custom_methods_description = params.multiqc_methods_description ? file(params.multiqc_methods_description) : file("$projectDir/assets/methods_description_template.yml", checkIfExists: true) - -// /* -// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -// IMPORT LOCAL MODULES/SUBWORKFLOWS -// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -// */ - -// // // TODO: remove -// // // SUBWORKFLOW: Consisting of a mix of local and nf-core/modules -// // // -// // include { PREPARE_ALPHAFOLD2_DBS } from '../subworkflows/local/prepare_alphafold2_dbs' - // -// MODULE: Local to the pipeline +// MODULE: Loaded from modules/local/ // -include { RUN_ALPHAFOLD2 } from '../modules/local/run_alphafold2' -include { RUN_ALPHAFOLD2_MSA } from '../modules/local/run_alphafold2_msa' -include { RUN_ALPHAFOLD2_PRED } from '../modules/local/run_alphafold2_pred' +include { RUN_ALPHAFOLD2 } from '../modules/local/run_alphafold2' +include { RUN_ALPHAFOLD2_MSA } from '../modules/local/run_alphafold2_msa' +include { RUN_ALPHAFOLD2_PRED } from '../modules/local/run_alphafold2_pred' /* ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ @@ -61,8 +21,15 @@ include { RUN_ALPHAFOLD2_PRED } from '../modules/local/run_alphafold2_pred' // MODULE: Installed directly from nf-core/modules // include { MULTIQC } from '../modules/nf-core/multiqc/main' -// TODO: remove -// include { CUSTOM_DUMPSOFTWAREVERSIONS } from '../modules/nf-core/custom/dumpsoftwareversions/main' + +// +// SUBWORKFLOW: Consisting entirely of nf-core/modules +// +include { paramsSummaryMap } from 'plugin/nf-validation' +include { fromSamplesheet } from 'plugin/nf-validation' +include { paramsSummaryMultiqc } from '../subworkflows/nf-core/utils_nfcore_pipeline' +include { softwareVersionsToYAML } from '../subworkflows/nf-core/utils_nfcore_pipeline' +include { methodsDescriptionText } from '../subworkflows/local/utils_nfcore_proteinfold_pipeline' /* ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ @@ -70,54 +37,23 @@ include { MULTIQC } from '../modules/nf-core/multiqc/main' ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ */ -// TODO: remove -// // Info required for completion email and summary -// def multiqc_report = [] -// workflow ALPHAFOLD2 { - -// take: -// ch_versions -// 
ch_full_dbs -// ch_alphafold2_mode -// // ch_alphafold2_model_preset, -// // ch_params, -// // ch_bfd, -// // ch_small_bfd, -// // ch_mgnify, -// // ch_pdb70, -// // ch_pdb_mmcif, -// // ch_uniref30, -// // ch_uniref90, -// // ch_pdb_seqres, -// // ch_uniprot - -// main: -// println("culo.........") -// ch_multiqc_files = Channel.empty() -// ch_versions = Channel.empty() - -// emit: -// multiqc_report = ch_multiqc_files // channel: /path/to/multiqc_report.html -// versions = ch_versions -// } - workflow ALPHAFOLD2 { - take: - ch_versions - ch_full_dbs - ch_alphafold2_mode - ch_alphafold2_model_preset - ch_alphafold2_params - ch_bfd - ch_small_bfd - ch_mgnify - ch_pdb70 - ch_pdb_mmcif - ch_uniref30 - ch_uniref90 - ch_pdb_seqres - ch_uniprot + take: + ch_versions // channel: [ path(versions.yml) ] + full_dbs // boolean: Use full databases (otherwise reduced version) + alphafold2_mode // string: Mode to run Alphafold2 in + alphafold2_model_preset // string: Specifies the model preset to use for Alphafold2 + ch_alphafold2_params // channel: path(alphafold2_params) + ch_bfd // channel: path(bfd) + ch_small_bfd // channel: path(small_bfd) + ch_mgnify // channel: path(mgnify) + ch_pdb70 // channel: path(pdb70) + ch_pdb_mmcif // channel: path(pdb_mmcif) + ch_uniref30 // channel: path(uniref30) + ch_uniref90 // channel: path(uniref90) + ch_pdb_seqres // channel: path(pdb_seqres) + ch_uniprot // channel: path(uniprot) main: ch_multiqc_files = Channel.empty() @@ -129,7 +65,7 @@ workflow ALPHAFOLD2 { .fromSamplesheet("input") .set { ch_fasta } - if (ch_alphafold2_model_preset != 'multimer') { + if (alphafold2_model_preset != 'multimer') { ch_fasta .map { meta, fasta -> @@ -139,19 +75,14 @@ workflow ALPHAFOLD2 { .set { ch_fasta } } - // - // SUBWORKFLOW: Download databases and params for Alphafold2 - // - // PREPARE_ALPHAFOLD2_DBS ( ) //TODO: remove - // ch_versions = ch_versions.mix(PREPARE_ALPHAFOLD2_DBS.out.versions) - if (ch_alphafold2_mode == 'standard') { + if (alphafold2_mode == 'standard') { // // SUBWORKFLOW: Run Alphafold2 standard mode // RUN_ALPHAFOLD2 ( ch_fasta, - ch_full_dbs, - ch_alphafold2_model_preset, + full_dbs, + alphafold2_model_preset, ch_alphafold2_params, ch_bfd, ch_small_bfd, @@ -166,14 +97,14 @@ workflow ALPHAFOLD2 { ch_multiqc_rep = RUN_ALPHAFOLD2.out.multiqc.collect() ch_versions = ch_versions.mix(RUN_ALPHAFOLD2.out.versions) - } else if (ch_alphafold2_mode == 'split_msa_prediction') { + } else if (alphafold2_mode == 'split_msa_prediction') { // // SUBWORKFLOW: Run Alphafold2 split mode, MSA and predicition // RUN_ALPHAFOLD2_MSA ( ch_fasta, - ch_full_dbs, - ch_alphafold2_model_preset, + full_dbs, + alphafold2_model_preset, ch_alphafold2_params, ch_bfd, ch_small_bfd, @@ -190,8 +121,8 @@ workflow ALPHAFOLD2 { RUN_ALPHAFOLD2_PRED ( ch_fasta, - ch_full_dbs, - ch_alphafold2_model_preset, + full_dbs, + alphafold2_model_preset, ch_alphafold2_params, ch_bfd, ch_small_bfd, @@ -207,14 +138,6 @@ workflow ALPHAFOLD2 { ch_multiqc_rep = RUN_ALPHAFOLD2_PRED.out.multiqc.collect() ch_versions = ch_versions.mix(RUN_ALPHAFOLD2_PRED.out.versions) } - - // TODO: remove - // // - // // MODULE: Pipeline reporting - // // - // CUSTOM_DUMPSOFTWAREVERSIONS ( - // ch_versions.unique().collectFile(name: 'collated_versions.yml') - // ) // // Collate and save software versions @@ -254,24 +177,6 @@ workflow ALPHAFOLD2 { versions = ch_versions // channel: [ path(versions.yml) ] } -// TODO: remove -// /* -// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -// 
COMPLETION EMAIL AND SUMMARY -// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -// */ - -// workflow.onComplete { -// if (params.email || params.email_on_fail) { -// NfcoreTemplate.email(workflow, params, summary_params, projectDir, log, multiqc_report) -// } -// NfcoreTemplate.dump_parameters(workflow, params) -// NfcoreTemplate.summary(workflow, params, log) -// if (params.hook_url) { -// NfcoreTemplate.adaptivecard(workflow, params, summary_params, projectDir, log) -// } -// } - /* ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ THE END From d9834c058ffa822636279d7fa51f1b9a40a6eb0d Mon Sep 17 00:00:00 2001 From: JoseEspinosa Date: Thu, 29 Feb 2024 12:58:19 +0100 Subject: [PATCH 03/20] Clean colabfold workflow --- workflows/colabfold.nf | 111 +++++++++-------------------------------- 1 file changed, 23 insertions(+), 88 deletions(-) diff --git a/workflows/colabfold.nf b/workflows/colabfold.nf index 6688d56c..04aab71f 100644 --- a/workflows/colabfold.nf +++ b/workflows/colabfold.nf @@ -1,49 +1,11 @@ -/* //TODO: change header -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - VALIDATE INPUTS -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -*/ - -include { paramsSummaryMap } from 'plugin/nf-validation' -include { fromSamplesheet } from 'plugin/nf-validation' -include { paramsSummaryMultiqc } from '../subworkflows/nf-core/utils_nfcore_pipeline' -include { softwareVersionsToYAML } from '../subworkflows/nf-core/utils_nfcore_pipeline' -include { methodsDescriptionText } from '../subworkflows/local/utils_nfcore_proteinfold_pipeline' - -// def logo = NfcoreTemplate.logo(workflow, params.monochrome_logs) -// def citation = '\n' + WorkflowMain.citation(workflow) + '\n' -// def summary_params = paramsSummaryMap(workflow) - -// // Print parameter summary log to screen -// log.info logo + paramsSummaryLog(workflow) + citation - -// // Validate input parameters -// WorkflowColabfold.initialise(params, log) - -// /* -// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -// CONFIG FILES -// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -// */ - -// ch_multiqc_config = Channel.fromPath("$projectDir/assets/multiqc_config.yml", checkIfExists: true) -// ch_multiqc_custom_config = params.multiqc_config ? Channel.fromPath( params.multiqc_config ) : Channel.empty() -// ch_multiqc_logo = params.multiqc_logo ? Channel.fromPath( params.multiqc_logo ) : Channel.empty() -// ch_multiqc_custom_methods_description = params.multiqc_methods_description ? 
file(params.multiqc_methods_description) : file("$projectDir/assets/methods_description_template.yml", checkIfExists: true) - /* ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ IMPORT LOCAL MODULES/SUBWORKFLOWS ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ */ -// // -// // SUBWORKFLOW: Consisting of a mix of local and nf-core/modules -// // -// include { PREPARE_COLABFOLD_DBS } from '../subworkflows/local/prepare_colabfold_dbs' - // -// MODULE: Local to the pipeline +// MODULE: Loaded from modules/local/ // include { COLABFOLD_BATCH } from '../modules/local/colabfold_batch' include { MMSEQS_COLABFOLDSEARCH } from '../modules/local/mmseqs_colabfoldsearch' @@ -58,8 +20,16 @@ include { MULTIFASTA_TO_CSV } from '../modules/local/multifasta_to_csv' // // MODULE: Installed directly from nf-core/modules // -include { MULTIQC } from '../modules/nf-core/multiqc/main' -// include { CUSTOM_DUMPSOFTWAREVERSIONS } from '../modules/nf-core/custom/dumpsoftwareversions/main' +include { MULTIQC } from '../modules/nf-core/multiqc/main' + +// +// SUBWORKFLOW: Consisting entirely of nf-core/modules +// +include { paramsSummaryMap } from 'plugin/nf-validation' +include { fromSamplesheet } from 'plugin/nf-validation' +include { paramsSummaryMultiqc } from '../subworkflows/nf-core/utils_nfcore_pipeline' +include { softwareVersionsToYAML } from '../subworkflows/nf-core/utils_nfcore_pipeline' +include { methodsDescriptionText } from '../subworkflows/local/utils_nfcore_proteinfold_pipeline' /* ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ @@ -67,18 +37,15 @@ include { MULTIQC } from '../modules/nf-core/multiqc/main' ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ */ -// Info required for completion email and summary -// def multiqc_report = [] - workflow COLABFOLD { take: - ch_versions - ch_colabfold_model_preset - ch_colabfold_params - ch_colabfold_db - ch_uniref30 - ch_num_recycle + ch_versions // channel: [ path(versions.yml) ] + colabfold_model_preset // string: Specifies the model preset to use for colabfold + ch_colabfold_params // channel: path(colabfold_params) + ch_colabfold_db // channel: path(colabfold_db) + ch_uniref30 // channel: path(uniref30) + num_recycle // int: Number of recycles for esmfold main: ch_multiqc_files = Channel.empty() @@ -90,9 +57,6 @@ workflow COLABFOLD { .fromSamplesheet("input") .set { ch_fasta } - // PREPARE_COLABFOLD_DBS ( ) - // ch_versions = ch_versions.mix(PREPARE_COLABFOLD_DBS.out.versions) - if (params.colabfold_server == 'webserver') { // // MODULE: Run colabfold @@ -104,21 +68,21 @@ workflow COLABFOLD { ch_versions = ch_versions.mix(MULTIFASTA_TO_CSV.out.versions) COLABFOLD_BATCH( MULTIFASTA_TO_CSV.out.input_csv, - ch_colabfold_model_preset, + colabfold_model_preset, ch_colabfold_params, ch_colabfold_db, ch_uniref30, - ch_num_recycle + num_recycle ) ch_versions = ch_versions.mix(COLABFOLD_BATCH.out.versions) } else { COLABFOLD_BATCH( ch_fasta, - ch_colabfold_model_preset, + colabfold_model_preset, ch_colabfold_params, ch_colabfold_db, ch_uniref30, - ch_num_recycle + num_recycle ) ch_versions = ch_versions.mix(COLABFOLD_BATCH.out.versions) } @@ -154,11 +118,11 @@ workflow COLABFOLD { // COLABFOLD_BATCH( MMSEQS_COLABFOLDSEARCH.out.a3m, - ch_colabfold_model_preset, + colabfold_model_preset, ch_colabfold_params, ch_colabfold_db, ch_uniref30, - ch_num_recycle + num_recycle ) ch_versions = 
ch_versions.mix(COLABFOLD_BATCH.out.versions) } @@ -170,21 +134,9 @@ workflow COLABFOLD { .collectFile(storeDir: "${params.outdir}/pipeline_info", name: 'nf_core_proteinfold_software_mqc_versions.yml', sort: true, newLine: true) .set { ch_collated_versions } - // // - // // MODULE: Pipeline reporting - // // - // CUSTOM_DUMPSOFTWAREVERSIONS ( - // ch_versions.unique().collectFile(name: 'collated_versions.yml') - // ) - // // MODULE: MultiQC // - // workflow_summary = WorkflowColabfold.paramsSummaryMultiqc(workflow, summary_params) - // ch_workflow_summary = Channel.value(workflow_summary) - - // methods_description = WorkflowColabfold.methodsDescriptionText(workflow, ch_multiqc_custom_methods_description, params) - // ch_methods_description = Channel.value(methods_description) ch_multiqc_report = Channel.empty() ch_multiqc_config = Channel.fromPath("$projectDir/assets/multiqc_config.yml", checkIfExists: true) ch_multiqc_custom_config = params.multiqc_config ? Channel.fromPath( params.multiqc_config ) : Channel.empty() @@ -213,23 +165,6 @@ workflow COLABFOLD { versions = ch_versions // channel: [ path(versions.yml) ] } -// /* -// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -// COMPLETION EMAIL AND SUMMARY -// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -// */ - -// workflow.onComplete { -// if (params.email || params.email_on_fail) { -// NfcoreTemplate.email(workflow, params, summary_params, projectDir, log, multiqc_report) -// } -// NfcoreTemplate.dump_parameters(workflow, params) -// NfcoreTemplate.summary(workflow, params, log) -// if (params.hook_url) { -// NfcoreTemplate.IM_notification(workflow, params, summary_params, projectDir, log) -// } -// } - /* ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ THE END From d1982dde04fc9857e46e15d3e46d3975938f8f17 Mon Sep 17 00:00:00 2001 From: JoseEspinosa Date: Thu, 29 Feb 2024 13:03:58 +0100 Subject: [PATCH 04/20] Clean esmfold --- workflows/esmfold.nf | 122 ++++++++++++++----------------------------- 1 file changed, 39 insertions(+), 83 deletions(-) diff --git a/workflows/esmfold.nf b/workflows/esmfold.nf index 70b64fc3..b372480c 100644 --- a/workflows/esmfold.nf +++ b/workflows/esmfold.nf @@ -1,32 +1,3 @@ -/* -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - PRINT PARAMS SUMMARY -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -*/ - -include { paramsSummaryLog; paramsSummaryMap; fromSamplesheet } from 'plugin/nf-validation' - -def logo = NfcoreTemplate.logo(workflow, params.monochrome_logs) -def citation = '\n' + WorkflowMain.citation(workflow) + '\n' -def summary_params = paramsSummaryMap(workflow) - -// Print parameter summary log to screen -log.info logo + paramsSummaryLog(workflow) + citation - -// Validate input parameters -WorkflowEsmfold.initialise(params, log) - -/* -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - CONFIG FILES -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -*/ - -ch_multiqc_config = Channel.fromPath("$projectDir/assets/multiqc_config.yml", checkIfExists: true) -ch_multiqc_custom_config = params.multiqc_config ? Channel.fromPath( params.multiqc_config ) : Channel.empty() -ch_multiqc_logo = params.multiqc_logo ? 
Channel.fromPath( params.multiqc_logo ) : Channel.empty() -ch_multiqc_custom_methods_description = params.multiqc_methods_description ? file(params.multiqc_methods_description) : file("$projectDir/assets/methods_description_template.yml", checkIfExists: true) - /* ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ IMPORT LOCAL MODULES/SUBWORKFLOWS @@ -34,12 +5,7 @@ ch_multiqc_custom_methods_description = params.multiqc_methods_description ? fil */ // -// SUBWORKFLOW: Consisting of a mix of local and nf-core/modules -// -include { PREPARE_ESMFOLD_DBS } from '../subworkflows/local/prepare_esmfold_dbs' - -// -// MODULE: Local to the pipeline +// MODULE: Loaded from modules/local/ // include { RUN_ESMFOLD } from '../modules/local/run_esmfold' include { MULTIFASTA_TO_SINGLEFASTA } from '../modules/local/multifasta_to_singlefasta' @@ -53,8 +19,16 @@ include { MULTIFASTA_TO_SINGLEFASTA } from '../modules/local/multifasta_to_singl // // MODULE: Installed directly from nf-core/modules // -include { MULTIQC } from '../modules/nf-core/multiqc/main' -include { CUSTOM_DUMPSOFTWAREVERSIONS } from '../modules/nf-core/custom/dumpsoftwareversions/main' +include { MULTIQC } from '../modules/nf-core/multiqc/main' + +// +// SUBWORKFLOW: Consisting entirely of nf-core/modules +// +include { paramsSummaryMap } from 'plugin/nf-validation' +include { fromSamplesheet } from 'plugin/nf-validation' +include { paramsSummaryMultiqc } from '../subworkflows/nf-core/utils_nfcore_pipeline' +include { softwareVersionsToYAML } from '../subworkflows/nf-core/utils_nfcore_pipeline' +include { methodsDescriptionText } from '../subworkflows/local/utils_nfcore_proteinfold_pipeline' /* ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ @@ -62,12 +36,15 @@ include { CUSTOM_DUMPSOFTWAREVERSIONS } from '../modules/nf-core/custom/dumpsoft ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ */ -// Info required for completion email and summary -def multiqc_report = [] - workflow ESMFOLD { - ch_versions = Channel.empty() + take: + ch_versions // channel: [ path(versions.yml) ] + ch_esmfold_params // directory: /path/to/esmfold/params/ + ch_num_recycle // int: Number of recycles for esmfold + + main: + ch_multiqc_files = Channel.empty() // // Create input channel from input file provided through params.input @@ -76,9 +53,6 @@ workflow ESMFOLD { .fromSamplesheet("input") .set { ch_fasta } - PREPARE_ESMFOLD_DBS( ) - ch_versions = ch_versions.mix(PREPARE_ESMFOLD_DBS.out.versions) - // // MODULE: Run esmfold // @@ -89,39 +63,42 @@ workflow ESMFOLD { ch_versions = ch_versions.mix(MULTIFASTA_TO_SINGLEFASTA.out.versions) RUN_ESMFOLD( MULTIFASTA_TO_SINGLEFASTA.out.input_fasta, - PREPARE_ESMFOLD_DBS.out.params, - params.num_recycles + ch_esmfold_params, + ch_num_recycle ) ch_versions = ch_versions.mix(RUN_ESMFOLD.out.versions) } else { RUN_ESMFOLD( ch_fasta, - PREPARE_ESMFOLD_DBS.out.params, - params.num_recycles + ch_esmfold_params, + ch_num_recycle ) ch_versions = ch_versions.mix(RUN_ESMFOLD.out.versions) } // - // MODULE: Pipeline reporting + // Collate and save software versions // - CUSTOM_DUMPSOFTWAREVERSIONS ( - ch_versions.unique().collectFile(name: 'collated_versions.yml') - ) + softwareVersionsToYAML(ch_versions) + .collectFile(storeDir: "${params.outdir}/pipeline_info", name: 'nf_core_proteinfold_software_mqc_versions.yml', sort: true, newLine: true) + .set { ch_collated_versions } // // MODULE: MultiQC // - workflow_summary 
= WorkflowEsmfold.paramsSummaryMultiqc(workflow, summary_params) - ch_workflow_summary = Channel.value(workflow_summary) - - methods_description = WorkflowEsmfold.methodsDescriptionText(workflow, ch_multiqc_custom_methods_description, params) - ch_methods_description = Channel.value(methods_description) + ch_multiqc_report = Channel.empty() + ch_multiqc_config = Channel.fromPath("$projectDir/assets/multiqc_config.yml", checkIfExists: true) + ch_multiqc_custom_config = params.multiqc_config ? Channel.fromPath( params.multiqc_config ) : Channel.empty() + ch_multiqc_logo = params.multiqc_logo ? Channel.fromPath( params.multiqc_logo ) : Channel.empty() + summary_params = paramsSummaryMap(workflow, parameters_schema: "nextflow_schema.json") + ch_workflow_summary = Channel.value(paramsSummaryMultiqc(summary_params)) + ch_multiqc_methods_description = params.multiqc_methods_description ? file(params.multiqc_methods_description, checkIfExists: true) : file("$projectDir/assets/methods_description_template.yml", checkIfExists: true) + ch_methods_description = Channel.value(methodsDescriptionText(ch_multiqc_methods_description)) ch_multiqc_files = Channel.empty() ch_multiqc_files = ch_multiqc_files.mix(ch_workflow_summary.collectFile(name: 'workflow_summary_mqc.yaml')) ch_multiqc_files = ch_multiqc_files.mix(ch_methods_description.collectFile(name: 'methods_description_mqc.yaml')) - ch_multiqc_files = ch_multiqc_files.mix(CUSTOM_DUMPSOFTWAREVERSIONS.out.mqc_yml.collect()) + ch_multiqc_files = ch_multiqc_files.mix(ch_collated_versions) ch_multiqc_files = ch_multiqc_files.mix(RUN_ESMFOLD.out.multiqc.collect()) MULTIQC ( @@ -130,31 +107,10 @@ workflow ESMFOLD { ch_multiqc_custom_config.toList(), ch_multiqc_logo.toList() ) - multiqc_report = MULTIQC.out.report.toList() -} - -/* -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - COMPLETION EMAIL AND SUMMARY -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -*/ - -workflow.onComplete { - if (params.email || params.email_on_fail) { - NfcoreTemplate.email(workflow, params, summary_params, projectDir, log, multiqc_report) - } - NfcoreTemplate.dump_parameters(workflow, params) - NfcoreTemplate.summary(workflow, params, log) - if (params.hook_url) { - NfcoreTemplate.IM_notification(workflow, params, summary_params, projectDir, log) - } -} - -workflow.onError { - if (workflow.errorReport.contains("Process requirement exceeds available memory")) { - println("🛑 Default resources exceed availability 🛑 ") - println("💡 See here on how to configure pipeline: https://nf-co.re/docs/usage/configuration#tuning-workflow-resources 💡") - } + ch_multiqc_report = MULTIQC.out.report.toList() + emit: + multiqc_report = ch_multiqc_report // channel: /path/to/multiqc_report.html + versions = ch_versions // channel: [ path(versions.yml) ] } /* From e678a6ce3d605dd25669917700ae867fda9d23c3 Mon Sep 17 00:00:00 2001 From: JoseEspinosa Date: Thu, 29 Feb 2024 14:55:32 +0100 Subject: [PATCH 05/20] Clean prepare_esmfold_dbs --- subworkflows/local/prepare_esmfold_dbs.nf | 17 ++++++++++++----- 1 file changed, 12 insertions(+), 5 deletions(-) diff --git a/subworkflows/local/prepare_esmfold_dbs.nf b/subworkflows/local/prepare_esmfold_dbs.nf index 864037c3..b6f3a1ff 100644 --- a/subworkflows/local/prepare_esmfold_dbs.nf +++ b/subworkflows/local/prepare_esmfold_dbs.nf @@ -7,21 +7,28 @@ include { ARIA2 as ARIA2_ESM2_T36_3B_UR50D } from '../../modu include { ARIA2 as ARIA2_ESM2_T36_3B_UR50D_CONTACT_REGRESSION } from 
'../../modules/nf-core/aria2/main' workflow PREPARE_ESMFOLD_DBS { + + take: + esmfold_db_path // directory: /path/to/esmfold/db/ + esmfold_3B_v1 // string: Specifies the link to download esmfold 3B v1 + esm2_t36_3B_UR50D // string: Specifies the link to download esm2 t36 3B UR50D + esm2_t36_3B_UR50D_contact_regression // string: Specifies the link to download esm2 t36 3B UR50D contact regression + main: ch_versions = Channel.empty() - if (params.esmfold_db) { - ch_params = file( params.esmfold_params_path, type: 'file' ) + if (esmfold_db) { + ch_params = file( esmfold_params_path, type: 'file' ) } else { ARIA2_ESMFOLD_3B_V1 ( - params.esmfold_3B_v1 + esmfold_3B_v1 ) ARIA2_ESM2_T36_3B_UR50D ( - params.esm2_t36_3B_UR50D + esm2_t36_3B_UR50D ) ARIA2_ESM2_T36_3B_UR50D_CONTACT_REGRESSION ( - params.esm2_t36_3B_UR50D_contact_regression + esm2_t36_3B_UR50D_contact_regression ) ch_params = ARIA2_ESMFOLD_3B_V1.out.downloaded_file.mix(ARIA2_ESM2_T36_3B_UR50D.out.downloaded_file,ARIA2_ESM2_T36_3B_UR50D_CONTACT_REGRESSION.out.downloaded_file).collect() ch_versions = ch_versions.mix(ARIA2_ESMFOLD_3B_V1.out.versions) From 7945ca7135a534f0b483fef6cd612fe46c223e3b Mon Sep 17 00:00:00 2001 From: JoseEspinosa Date: Thu, 29 Feb 2024 14:55:58 +0100 Subject: [PATCH 06/20] Fix stub section --- modules/local/run_esmfold.nf | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/modules/local/run_esmfold.nf b/modules/local/run_esmfold.nf index 3f8e6c02..4d5645c3 100644 --- a/modules/local/run_esmfold.nf +++ b/modules/local/run_esmfold.nf @@ -43,7 +43,7 @@ process RUN_ESMFOLD { def VERSION = '1.0.3' // WARN: Version information not provided by tool on CLI. Please update this string when bumping container versions. """ touch ./"${fasta.baseName}".pdb - touch ./"${fasta.baseName}"_mqc.tsv + touch ./"${fasta.baseName}"_plddt_mqc.tsv cat <<-END_VERSIONS > versions.yml "${task.process}": From 619ac8d2abfb832526f845fb863dae74711e8315 Mon Sep 17 00:00:00 2001 From: JoseEspinosa Date: Thu, 29 Feb 2024 15:58:49 +0100 Subject: [PATCH 07/20] Update main --- main.nf | 58 ++++++++++++++++++++++++++++----------------------------- 1 file changed, 29 insertions(+), 29 deletions(-) diff --git a/main.nf b/main.nf index f848598a..4449093f 100644 --- a/main.nf +++ b/main.nf @@ -22,9 +22,10 @@ if (params.mode == "alphafold2") { include { ALPHAFOLD2 } from './workflows/alphafold2' } else if (params.mode == "colabfold") { include { PREPARE_COLABFOLD_DBS } from './subworkflows/local/prepare_colabfold_dbs' - include { COLABFOLD } from './workflows/colabfold' + include { COLABFOLD } from './workflows/colabfold' } else if (params.mode == "esmfold") { - include { ESMFOLD } from './workflows/esmfold' + include { PREPARE_ESMFOLD_DBS } from './subworkflows/local/prepare_esmfold_dbs' + include { ESMFOLD } from './workflows/esmfold' } include { PIPELINE_INITIALISATION } from './subworkflows/local/utils_nfcore_proteinfold_pipeline' @@ -41,30 +42,6 @@ include { getColabfoldAlphafold2ParamsPath } from './subworkflows/local/utils_nf params.colabfold_alphafold2_params = getColabfoldAlphafold2Params() params.colabfold_alphafold2_params_path = getColabfoldAlphafold2ParamsPath() -/* -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - VALIDATE & PRINT PARAMETER SUMMARY -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -*/ - -// include { validateParameters; paramsHelp } from 'plugin/nf-validation' - -// // Print help message if needed -// if (params.help) { -// def 
logo = NfcoreTemplate.logo(workflow, params.monochrome_logs) -// def citation = '\n' + WorkflowMain.citation(workflow) + '\n' -// def String command = "nextflow run ${workflow.manifest.name} --input samplesheet.csv --genome GRCh37 -profile docker" -// log.info logo + paramsHelp(command) + citation + NfcoreTemplate.dashedLine(params.monochrome_logs) -// System.exit(0) -// } - -// // Validate input parameters -// if (params.validate_params) { -// validateParameters() -// } - -// WorkflowMain.initialise(workflow, params, log, args) - /* ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ NAMED WORKFLOW FOR PIPELINE @@ -87,7 +64,22 @@ workflow NFCORE_PROTEINFOLD { // // SUBWORKFLOW: Prepare Alphafold2 DBs // - PREPARE_ALPHAFOLD2_DBS ( ) + PREPARE_ALPHAFOLD2_DBS ( + params.alphafold2_db, + params.full_dbs, + params.bfd_path, + params.small_bfd_path, + params.alphafold2_params_path, + params.mgnify_path, + params.pdb70_path, + params.pdb_mmcif_path, + params.uniref30_alphafold2_path, + params.uniref90_path, + params.pdb_seqres_path, + params.uniprot_path, + params.uniprot_sprot, + params.uniprot_trembl + ) ch_versions = ch_versions.mix(PREPARE_ALPHAFOLD2_DBS.out.versions) // @@ -121,6 +113,8 @@ workflow NFCORE_PROTEINFOLD { // SUBWORKFLOW: Prepare Colabfold DBs // PREPARE_COLABFOLD_DBS ( + params.colabfold_db, + params.colabfold_server, params.colabfold_alphafold2_params_path, params.colabfold_db_path, params.uniref30_colabfold_path, @@ -153,11 +147,17 @@ workflow NFCORE_PROTEINFOLD { // // SUBWORKFLOW: Prepare esmfold DBs // - PREPARE_ESMFOLD_DBS () + PREPARE_ESMFOLD_DBS ( + params.esmfold_db, + params.esmfold_params_path, + params.esmfold_3B_v1, + params.esm2_t36_3B_UR50D, + params.esm2_t36_3B_UR50D_contact_regression + ) ch_versions = ch_versions.mix(PREPARE_ESMFOLD_DBS.out.versions) // - // WORKFLOW: Run esmfold + // WORKFLOW: Run nf-core/esmfold workflow // ESMFOLD ( ch_versions, From bba9830cb64a28d585c2ef9a6e5c44b499013815 Mon Sep 17 00:00:00 2001 From: JoseEspinosa Date: Thu, 29 Feb 2024 16:03:02 +0100 Subject: [PATCH 08/20] Update subworkflows --- modules.json | 19 + .../nf-core/utils_nextflow_pipeline/main.nf | 126 +++++ .../nf-core/utils_nextflow_pipeline/meta.yml | 38 ++ .../tests/main.function.nf.test | 54 +++ .../tests/main.function.nf.test.snap | 12 + .../tests/main.workflow.nf.test | 123 +++++ .../utils_nextflow_pipeline/tests/tags.yml | 2 + .../nf-core/utils_nfcore_pipeline/main.nf | 440 ++++++++++++++++++ .../nf-core/utils_nfcore_pipeline/meta.yml | 24 + .../tests/main.function.nf.test | 134 ++++++ .../tests/main.function.nf.test.snap | 138 ++++++ .../tests/main.workflow.nf.test | 29 ++ .../tests/main.workflow.nf.test.snap | 15 + .../utils_nfcore_pipeline/tests/tags.yml | 2 + .../nf-core/utils_nfvalidation_plugin/main.nf | 62 +++ .../utils_nfvalidation_plugin/meta.yml | 44 ++ .../tests/main.nf.test | 200 ++++++++ .../tests/nextflow_schema.json | 96 ++++ .../utils_nfvalidation_plugin/tests/tags.yml | 2 + 19 files changed, 1560 insertions(+) create mode 100644 subworkflows/nf-core/utils_nextflow_pipeline/main.nf create mode 100644 subworkflows/nf-core/utils_nextflow_pipeline/meta.yml create mode 100644 subworkflows/nf-core/utils_nextflow_pipeline/tests/main.function.nf.test create mode 100644 subworkflows/nf-core/utils_nextflow_pipeline/tests/main.function.nf.test.snap create mode 100644 subworkflows/nf-core/utils_nextflow_pipeline/tests/main.workflow.nf.test create mode 100644 subworkflows/nf-core/utils_nextflow_pipeline/tests/tags.yml create 
mode 100644 subworkflows/nf-core/utils_nfcore_pipeline/main.nf create mode 100644 subworkflows/nf-core/utils_nfcore_pipeline/meta.yml create mode 100644 subworkflows/nf-core/utils_nfcore_pipeline/tests/main.function.nf.test create mode 100644 subworkflows/nf-core/utils_nfcore_pipeline/tests/main.function.nf.test.snap create mode 100644 subworkflows/nf-core/utils_nfcore_pipeline/tests/main.workflow.nf.test create mode 100644 subworkflows/nf-core/utils_nfcore_pipeline/tests/main.workflow.nf.test.snap create mode 100644 subworkflows/nf-core/utils_nfcore_pipeline/tests/tags.yml create mode 100644 subworkflows/nf-core/utils_nfvalidation_plugin/main.nf create mode 100644 subworkflows/nf-core/utils_nfvalidation_plugin/meta.yml create mode 100644 subworkflows/nf-core/utils_nfvalidation_plugin/tests/main.nf.test create mode 100644 subworkflows/nf-core/utils_nfvalidation_plugin/tests/nextflow_schema.json create mode 100644 subworkflows/nf-core/utils_nfvalidation_plugin/tests/tags.yml diff --git a/modules.json b/modules.json index 5f59deaf..c0360e4a 100644 --- a/modules.json +++ b/modules.json @@ -42,6 +42,25 @@ "patch": "modules/nf-core/untar/untar.diff" } } + }, + "subworkflows": { + "nf-core": { + "utils_nextflow_pipeline": { + "branch": "master", + "git_sha": "cd08c91373cd00a73255081340e4914485846ba1", + "installed_by": ["subworkflows"] + }, + "utils_nfcore_pipeline": { + "branch": "master", + "git_sha": "262b17ed2aad591039f914951659177e6c39a8d8", + "installed_by": ["subworkflows"] + }, + "utils_nfvalidation_plugin": { + "branch": "master", + "git_sha": "cd08c91373cd00a73255081340e4914485846ba1", + "installed_by": ["subworkflows"] + } + } } } } diff --git a/subworkflows/nf-core/utils_nextflow_pipeline/main.nf b/subworkflows/nf-core/utils_nextflow_pipeline/main.nf new file mode 100644 index 00000000..ac31f28f --- /dev/null +++ b/subworkflows/nf-core/utils_nextflow_pipeline/main.nf @@ -0,0 +1,126 @@ +// +// Subworkflow with functionality that may be useful for any Nextflow pipeline +// + +import org.yaml.snakeyaml.Yaml +import groovy.json.JsonOutput +import nextflow.extension.FilesEx + +/* +======================================================================================== + SUBWORKFLOW DEFINITION +======================================================================================== +*/ + +workflow UTILS_NEXTFLOW_PIPELINE { + + take: + print_version // boolean: print version + dump_parameters // boolean: dump parameters + outdir // path: base directory used to publish pipeline results + check_conda_channels // boolean: check conda channels + + main: + + // + // Print workflow version and exit on --version + // + if (print_version) { + log.info "${workflow.manifest.name} ${getWorkflowVersion()}" + System.exit(0) + } + + // + // Dump pipeline parameters to a JSON file + // + if (dump_parameters && outdir) { + dumpParametersToJSON(outdir) + } + + // + // When running with Conda, warn if channels have not been set-up appropriately + // + if (check_conda_channels) { + checkCondaChannels() + } + + emit: + dummy_emit = true +} + +/* +======================================================================================== + FUNCTIONS +======================================================================================== +*/ + +// +// Generate version string +// +def getWorkflowVersion() { + String version_string = "" + if (workflow.manifest.version) { + def prefix_v = workflow.manifest.version[0] != 'v' ? 
'v' : '' + version_string += "${prefix_v}${workflow.manifest.version}" + } + + if (workflow.commitId) { + def git_shortsha = workflow.commitId.substring(0, 7) + version_string += "-g${git_shortsha}" + } + + return version_string +} + +// +// Dump pipeline parameters to a JSON file +// +def dumpParametersToJSON(outdir) { + def timestamp = new java.util.Date().format( 'yyyy-MM-dd_HH-mm-ss') + def filename = "params_${timestamp}.json" + def temp_pf = new File(workflow.launchDir.toString(), ".${filename}") + def jsonStr = JsonOutput.toJson(params) + temp_pf.text = JsonOutput.prettyPrint(jsonStr) + + FilesEx.copyTo(temp_pf.toPath(), "${outdir}/pipeline_info/params_${timestamp}.json") + temp_pf.delete() +} + +// +// When running with -profile conda, warn if channels have not been set-up appropriately +// +def checkCondaChannels() { + Yaml parser = new Yaml() + def channels = [] + try { + def config = parser.load("conda config --show channels".execute().text) + channels = config.channels + } catch(NullPointerException | IOException e) { + log.warn "Could not verify conda channel configuration." + return + } + + // Check that all channels are present + // This channel list is ordered by required channel priority. + def required_channels_in_order = ['conda-forge', 'bioconda', 'defaults'] + def channels_missing = ((required_channels_in_order as Set) - (channels as Set)) as Boolean + + // Check that they are in the right order + def channel_priority_violation = false + def n = required_channels_in_order.size() + for (int i = 0; i < n - 1; i++) { + channel_priority_violation |= !(channels.indexOf(required_channels_in_order[i]) < channels.indexOf(required_channels_in_order[i+1])) + } + + if (channels_missing | channel_priority_violation) { + log.warn "~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~\n" + + " There is a problem with your Conda configuration!\n\n" + + " You will need to set-up the conda-forge and bioconda channels correctly.\n" + + " Please refer to https://bioconda.github.io/\n" + + " The observed channel order is \n" + + " ${channels}\n" + + " but the following channel order is required:\n" + + " ${required_channels_in_order}\n" + + "~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~" + } +} diff --git a/subworkflows/nf-core/utils_nextflow_pipeline/meta.yml b/subworkflows/nf-core/utils_nextflow_pipeline/meta.yml new file mode 100644 index 00000000..e5c3a0a8 --- /dev/null +++ b/subworkflows/nf-core/utils_nextflow_pipeline/meta.yml @@ -0,0 +1,38 @@ +# yaml-language-server: $schema=https://raw.githubusercontent.com/nf-core/modules/master/subworkflows/yaml-schema.json +name: "UTILS_NEXTFLOW_PIPELINE" +description: Subworkflow with functionality that may be useful for any Nextflow pipeline +keywords: + - utility + - pipeline + - initialise + - version +components: [] +input: + - print_version: + type: boolean + description: | + Print the version of the pipeline and exit + - dump_parameters: + type: boolean + description: | + Dump the parameters of the pipeline to a JSON file + - output_directory: + type: directory + description: Path to output dir to write JSON file to. + pattern: "results/" + - check_conda_channel: + type: boolean + description: | + Check if the conda channel priority is correct. 
+output: + - dummy_emit: + type: boolean + description: | + Dummy emit to make nf-core subworkflows lint happy +authors: + - "@adamrtalbot" + - "@drpatelh" +maintainers: + - "@adamrtalbot" + - "@drpatelh" + - "@maxulysse" diff --git a/subworkflows/nf-core/utils_nextflow_pipeline/tests/main.function.nf.test b/subworkflows/nf-core/utils_nextflow_pipeline/tests/main.function.nf.test new file mode 100644 index 00000000..8ed4310c --- /dev/null +++ b/subworkflows/nf-core/utils_nextflow_pipeline/tests/main.function.nf.test @@ -0,0 +1,54 @@ + +nextflow_function { + + name "Test Functions" + script "subworkflows/nf-core/utils_nextflow_pipeline/main.nf" + config "subworkflows/nf-core/utils_nextflow_pipeline/tests/nextflow.config" + tag 'subworkflows' + tag 'utils_nextflow_pipeline' + tag 'subworkflows/utils_nextflow_pipeline' + + test("Test Function getWorkflowVersion") { + + function "getWorkflowVersion" + + then { + assertAll( + { assert function.success }, + { assert snapshot(function.result).match() } + ) + } + } + + test("Test Function dumpParametersToJSON") { + + function "dumpParametersToJSON" + + when { + function { + """ + // define inputs of the function here. Example: + input[0] = "$outputDir" + """.stripIndent() + } + } + + then { + assertAll( + { assert function.success } + ) + } + } + + test("Test Function checkCondaChannels") { + + function "checkCondaChannels" + + then { + assertAll( + { assert function.success }, + { assert snapshot(function.result).match() } + ) + } + } +} \ No newline at end of file diff --git a/subworkflows/nf-core/utils_nextflow_pipeline/tests/main.function.nf.test.snap b/subworkflows/nf-core/utils_nextflow_pipeline/tests/main.function.nf.test.snap new file mode 100644 index 00000000..db2030f8 --- /dev/null +++ b/subworkflows/nf-core/utils_nextflow_pipeline/tests/main.function.nf.test.snap @@ -0,0 +1,12 @@ +{ + "Test Function getWorkflowVersion": { + "content": [ + "v9.9.9" + ], + "timestamp": "2024-01-19T11:32:36.031083" + }, + "Test Function checkCondaChannels": { + "content": null, + "timestamp": "2024-01-19T11:32:50.456" + } +} \ No newline at end of file diff --git a/subworkflows/nf-core/utils_nextflow_pipeline/tests/main.workflow.nf.test b/subworkflows/nf-core/utils_nextflow_pipeline/tests/main.workflow.nf.test new file mode 100644 index 00000000..f7c54bc6 --- /dev/null +++ b/subworkflows/nf-core/utils_nextflow_pipeline/tests/main.workflow.nf.test @@ -0,0 +1,123 @@ +nextflow_workflow { + + name "Test Workflow UTILS_NEXTFLOW_PIPELINE" + script "../main.nf" + config "subworkflows/nf-core/utils_nextflow_pipeline/tests/nextflow.config" + workflow "UTILS_NEXTFLOW_PIPELINE" + tag 'subworkflows' + tag 'utils_nextflow_pipeline' + tag 'subworkflows/utils_nextflow_pipeline' + + test("Should run no inputs") { + + when { + params { + outdir = "tests/results" + } + workflow { + """ + print_version = false + dump_parameters = false + outdir = null + check_conda_channels = false + + input[0] = print_version + input[1] = dump_parameters + input[2] = outdir + input[3] = check_conda_channels + """ + } + } + + then { + assertAll( + { assert workflow.success } + ) + } + } + + test("Should print version") { + + when { + params { + outdir = "tests/results" + } + workflow { + """ + print_version = true + dump_parameters = false + outdir = null + check_conda_channels = false + + input[0] = print_version + input[1] = dump_parameters + input[2] = outdir + input[3] = check_conda_channels + """ + } + } + + then { + assertAll( + { assert workflow.success }, + { assert 
workflow.stdout.contains("nextflow_workflow v9.9.9") } + ) + } + } + + test("Should dump params") { + + when { + params { + outdir = "$outputDir" + } + workflow { + """ + print_version = false + dump_parameters = true + outdir = params.outdir + check_conda_channels = false + + input[0] = false + input[1] = true + input[2] = params.outdir + input[3] = false + """ + } + } + + then { + assertAll( + { assert workflow.success } + ) + } + } + + test("Should not create params JSON if no output directory") { + + when { + params { + outdir = "$outputDir" + } + workflow { + """ + print_version = false + dump_parameters = true + outdir = params.outdir + check_conda_channels = false + + input[0] = false + input[1] = true + input[2] = null + input[3] = false + """ + } + } + + then { + assertAll( + { assert workflow.success } + ) + } + } +} diff --git a/subworkflows/nf-core/utils_nextflow_pipeline/tests/tags.yml b/subworkflows/nf-core/utils_nextflow_pipeline/tests/tags.yml new file mode 100644 index 00000000..f8476112 --- /dev/null +++ b/subworkflows/nf-core/utils_nextflow_pipeline/tests/tags.yml @@ -0,0 +1,2 @@ +subworkflows/utils_nextflow_pipeline: + - subworkflows/nf-core/utils_nextflow_pipeline/** diff --git a/subworkflows/nf-core/utils_nfcore_pipeline/main.nf b/subworkflows/nf-core/utils_nfcore_pipeline/main.nf new file mode 100644 index 00000000..a8b55d6f --- /dev/null +++ b/subworkflows/nf-core/utils_nfcore_pipeline/main.nf @@ -0,0 +1,440 @@ +// +// Subworkflow with utility functions specific to the nf-core pipeline template +// + +import org.yaml.snakeyaml.Yaml +import nextflow.extension.FilesEx + +/* +======================================================================================== + SUBWORKFLOW DEFINITION +======================================================================================== +*/ + +workflow UTILS_NFCORE_PIPELINE { + + take: + nextflow_cli_args + + main: + valid_config = checkConfigProvided() + checkProfileProvided(nextflow_cli_args) + + emit: + valid_config +} + +/* +======================================================================================== + FUNCTIONS +======================================================================================== +*/ + +// +// Warn if a -profile or Nextflow config has not been provided to run the pipeline +// +def checkConfigProvided() { + valid_config = true + if (workflow.profile == 'standard' && workflow.configFiles.size() <= 1) { + log.warn "[$workflow.manifest.name] You are attempting to run the pipeline without any custom configuration!\n\n" + + "This will be dependent on your local compute environment but can be achieved via one or more of the following:\n" + + " (1) Using an existing pipeline profile e.g. `-profile docker` or `-profile singularity`\n" + + " (2) Using an existing nf-core/configs for your Institution e.g. `-profile crick` or `-profile uppmax`\n" + + " (3) Using your own local custom config e.g. `-c /path/to/your/custom.config`\n\n" + + "Please refer to the quick start section and usage docs for the pipeline.\n " + valid_config = false + } + return valid_config +} + +// +// Exit pipeline if --profile contains spaces +// +def checkProfileProvided(nextflow_cli_args) { + if (workflow.profile.endsWith(',')) { + error "The `-profile` option cannot end with a trailing comma, please remove it and re-run the pipeline!\n" + + "HINT: A common mistake is to provide multiple values separated by spaces e.g. 
`-profile test, docker`.\n" + } + if (nextflow_cli_args[0]) { + log.warn "nf-core pipelines do not accept positional arguments. The positional argument `${nextflow_cli_args[0]}` has been detected.\n" + + "HINT: A common mistake is to provide multiple values separated by spaces e.g. `-profile test, docker`.\n" + } +} + +// +// Citation string for pipeline +// +def workflowCitation() { + return "If you use ${workflow.manifest.name} for your analysis please cite:\n\n" + + "* The pipeline\n" + + " ${workflow.manifest.doi}\n\n" + + "* The nf-core framework\n" + + " https://doi.org/10.1038/s41587-020-0439-x\n\n" + + "* Software dependencies\n" + + " https://github.com/${workflow.manifest.name}/blob/master/CITATIONS.md" +} + +// +// Generate workflow version string +// +def getWorkflowVersion() { + String version_string = "" + if (workflow.manifest.version) { + def prefix_v = workflow.manifest.version[0] != 'v' ? 'v' : '' + version_string += "${prefix_v}${workflow.manifest.version}" + } + + if (workflow.commitId) { + def git_shortsha = workflow.commitId.substring(0, 7) + version_string += "-g${git_shortsha}" + } + + return version_string +} + +// +// Get software versions for pipeline +// +def processVersionsFromYAML(yaml_file) { + Yaml yaml = new Yaml() + versions = yaml.load(yaml_file).collectEntries { k, v -> [ k.tokenize(':')[-1], v ] } + return yaml.dumpAsMap(versions).trim() +} + +// +// Get workflow version for pipeline +// +def workflowVersionToYAML() { + return """ + Workflow: + $workflow.manifest.name: ${getWorkflowVersion()} + Nextflow: $workflow.nextflow.version + """.stripIndent().trim() +} + +// +// Get channel of software versions used in pipeline in YAML format +// +def softwareVersionsToYAML(ch_versions) { + return ch_versions + .unique() + .map { processVersionsFromYAML(it) } + .unique() + .mix(Channel.of(workflowVersionToYAML())) +} + +// +// Get workflow summary for MultiQC +// +def paramsSummaryMultiqc(summary_params) { + def summary_section = '' + for (group in summary_params.keySet()) { + def group_params = summary_params.get(group) // This gets the parameters of that particular group + if (group_params) { + summary_section += "
    <p style=\"font-size:110%\"><b>$group</b></p>\n"
+            summary_section += "    <dl class=\"dl-horizontal\">\n"
+            for (param in group_params.keySet()) {
+                summary_section += "        <dt>$param</dt><dd><samp>${group_params.get(param) ?: '<span style=\"color:#999999;\">N/A</a>'}</samp></dd>\n"
+            }
+            summary_section += "    </dl>
\n" + } + } + + String yaml_file_text = "id: '${workflow.manifest.name.replace('/','-')}-summary'\n" + yaml_file_text += "description: ' - this information is collected when the pipeline is started.'\n" + yaml_file_text += "section_name: '${workflow.manifest.name} Workflow Summary'\n" + yaml_file_text += "section_href: 'https://github.com/${workflow.manifest.name}'\n" + yaml_file_text += "plot_type: 'html'\n" + yaml_file_text += "data: |\n" + yaml_file_text += "${summary_section}" + + return yaml_file_text +} + +// +// nf-core logo +// +def nfCoreLogo(monochrome_logs=true) { + Map colors = logColours(monochrome_logs) + String.format( + """\n + ${dashedLine(monochrome_logs)} + ${colors.green},--.${colors.black}/${colors.green},-.${colors.reset} + ${colors.blue} ___ __ __ __ ___ ${colors.green}/,-._.--~\'${colors.reset} + ${colors.blue} |\\ | |__ __ / ` / \\ |__) |__ ${colors.yellow}} {${colors.reset} + ${colors.blue} | \\| | \\__, \\__/ | \\ |___ ${colors.green}\\`-._,-`-,${colors.reset} + ${colors.green}`._,._,\'${colors.reset} + ${colors.purple} ${workflow.manifest.name} ${getWorkflowVersion()}${colors.reset} + ${dashedLine(monochrome_logs)} + """.stripIndent() + ) +} + +// +// Return dashed line +// +def dashedLine(monochrome_logs=true) { + Map colors = logColours(monochrome_logs) + return "-${colors.dim}----------------------------------------------------${colors.reset}-" +} + +// +// ANSII colours used for terminal logging +// +def logColours(monochrome_logs=true) { + Map colorcodes = [:] + + // Reset / Meta + colorcodes['reset'] = monochrome_logs ? '' : "\033[0m" + colorcodes['bold'] = monochrome_logs ? '' : "\033[1m" + colorcodes['dim'] = monochrome_logs ? '' : "\033[2m" + colorcodes['underlined'] = monochrome_logs ? '' : "\033[4m" + colorcodes['blink'] = monochrome_logs ? '' : "\033[5m" + colorcodes['reverse'] = monochrome_logs ? '' : "\033[7m" + colorcodes['hidden'] = monochrome_logs ? '' : "\033[8m" + + // Regular Colors + colorcodes['black'] = monochrome_logs ? '' : "\033[0;30m" + colorcodes['red'] = monochrome_logs ? '' : "\033[0;31m" + colorcodes['green'] = monochrome_logs ? '' : "\033[0;32m" + colorcodes['yellow'] = monochrome_logs ? '' : "\033[0;33m" + colorcodes['blue'] = monochrome_logs ? '' : "\033[0;34m" + colorcodes['purple'] = monochrome_logs ? '' : "\033[0;35m" + colorcodes['cyan'] = monochrome_logs ? '' : "\033[0;36m" + colorcodes['white'] = monochrome_logs ? '' : "\033[0;37m" + + // Bold + colorcodes['bblack'] = monochrome_logs ? '' : "\033[1;30m" + colorcodes['bred'] = monochrome_logs ? '' : "\033[1;31m" + colorcodes['bgreen'] = monochrome_logs ? '' : "\033[1;32m" + colorcodes['byellow'] = monochrome_logs ? '' : "\033[1;33m" + colorcodes['bblue'] = monochrome_logs ? '' : "\033[1;34m" + colorcodes['bpurple'] = monochrome_logs ? '' : "\033[1;35m" + colorcodes['bcyan'] = monochrome_logs ? '' : "\033[1;36m" + colorcodes['bwhite'] = monochrome_logs ? '' : "\033[1;37m" + + // Underline + colorcodes['ublack'] = monochrome_logs ? '' : "\033[4;30m" + colorcodes['ured'] = monochrome_logs ? '' : "\033[4;31m" + colorcodes['ugreen'] = monochrome_logs ? '' : "\033[4;32m" + colorcodes['uyellow'] = monochrome_logs ? '' : "\033[4;33m" + colorcodes['ublue'] = monochrome_logs ? '' : "\033[4;34m" + colorcodes['upurple'] = monochrome_logs ? '' : "\033[4;35m" + colorcodes['ucyan'] = monochrome_logs ? '' : "\033[4;36m" + colorcodes['uwhite'] = monochrome_logs ? '' : "\033[4;37m" + + // High Intensity + colorcodes['iblack'] = monochrome_logs ? 
'' : "\033[0;90m" + colorcodes['ired'] = monochrome_logs ? '' : "\033[0;91m" + colorcodes['igreen'] = monochrome_logs ? '' : "\033[0;92m" + colorcodes['iyellow'] = monochrome_logs ? '' : "\033[0;93m" + colorcodes['iblue'] = monochrome_logs ? '' : "\033[0;94m" + colorcodes['ipurple'] = monochrome_logs ? '' : "\033[0;95m" + colorcodes['icyan'] = monochrome_logs ? '' : "\033[0;96m" + colorcodes['iwhite'] = monochrome_logs ? '' : "\033[0;97m" + + // Bold High Intensity + colorcodes['biblack'] = monochrome_logs ? '' : "\033[1;90m" + colorcodes['bired'] = monochrome_logs ? '' : "\033[1;91m" + colorcodes['bigreen'] = monochrome_logs ? '' : "\033[1;92m" + colorcodes['biyellow'] = monochrome_logs ? '' : "\033[1;93m" + colorcodes['biblue'] = monochrome_logs ? '' : "\033[1;94m" + colorcodes['bipurple'] = monochrome_logs ? '' : "\033[1;95m" + colorcodes['bicyan'] = monochrome_logs ? '' : "\033[1;96m" + colorcodes['biwhite'] = monochrome_logs ? '' : "\033[1;97m" + + return colorcodes +} + +// +// Attach the multiqc report to email +// +def attachMultiqcReport(multiqc_report) { + def mqc_report = null + try { + if (workflow.success) { + mqc_report = multiqc_report.getVal() + if (mqc_report.getClass() == ArrayList && mqc_report.size() >= 1) { + if (mqc_report.size() > 1) { + log.warn "[$workflow.manifest.name] Found multiple reports from process 'MULTIQC', will use only one" + } + mqc_report = mqc_report[0] + } + } + } catch (all) { + if (multiqc_report) { + log.warn "[$workflow.manifest.name] Could not attach MultiQC report to summary email" + } + } + return mqc_report +} + +// +// Construct and send completion email +// +def completionEmail(summary_params, email, email_on_fail, plaintext_email, outdir, monochrome_logs=true, multiqc_report=null) { + + // Set up the e-mail variables + def subject = "[$workflow.manifest.name] Successful: $workflow.runName" + if (!workflow.success) { + subject = "[$workflow.manifest.name] FAILED: $workflow.runName" + } + + def summary = [:] + for (group in summary_params.keySet()) { + summary << summary_params[group] + } + + def misc_fields = [:] + misc_fields['Date Started'] = workflow.start + misc_fields['Date Completed'] = workflow.complete + misc_fields['Pipeline script file path'] = workflow.scriptFile + misc_fields['Pipeline script hash ID'] = workflow.scriptId + if (workflow.repository) misc_fields['Pipeline repository Git URL'] = workflow.repository + if (workflow.commitId) misc_fields['Pipeline repository Git Commit'] = workflow.commitId + if (workflow.revision) misc_fields['Pipeline Git branch/tag'] = workflow.revision + misc_fields['Nextflow Version'] = workflow.nextflow.version + misc_fields['Nextflow Build'] = workflow.nextflow.build + misc_fields['Nextflow Compile Timestamp'] = workflow.nextflow.timestamp + + def email_fields = [:] + email_fields['version'] = getWorkflowVersion() + email_fields['runName'] = workflow.runName + email_fields['success'] = workflow.success + email_fields['dateComplete'] = workflow.complete + email_fields['duration'] = workflow.duration + email_fields['exitStatus'] = workflow.exitStatus + email_fields['errorMessage'] = (workflow.errorMessage ?: 'None') + email_fields['errorReport'] = (workflow.errorReport ?: 'None') + email_fields['commandLine'] = workflow.commandLine + email_fields['projectDir'] = workflow.projectDir + email_fields['summary'] = summary << misc_fields + + // On success try attach the multiqc report + def mqc_report = attachMultiqcReport(multiqc_report) + + // Check if we are only sending emails on failure + def 
email_address = email + if (!email && email_on_fail && !workflow.success) { + email_address = email_on_fail + } + + // Render the TXT template + def engine = new groovy.text.GStringTemplateEngine() + def tf = new File("${workflow.projectDir}/assets/email_template.txt") + def txt_template = engine.createTemplate(tf).make(email_fields) + def email_txt = txt_template.toString() + + // Render the HTML template + def hf = new File("${workflow.projectDir}/assets/email_template.html") + def html_template = engine.createTemplate(hf).make(email_fields) + def email_html = html_template.toString() + + // Render the sendmail template + def max_multiqc_email_size = (params.containsKey('max_multiqc_email_size') ? params.max_multiqc_email_size : 0) as nextflow.util.MemoryUnit + def smail_fields = [ email: email_address, subject: subject, email_txt: email_txt, email_html: email_html, projectDir: "${workflow.projectDir}", mqcFile: mqc_report, mqcMaxSize: max_multiqc_email_size.toBytes() ] + def sf = new File("${workflow.projectDir}/assets/sendmail_template.txt") + def sendmail_template = engine.createTemplate(sf).make(smail_fields) + def sendmail_html = sendmail_template.toString() + + // Send the HTML e-mail + Map colors = logColours(monochrome_logs) + if (email_address) { + try { + if (plaintext_email) { throw GroovyException('Send plaintext e-mail, not HTML') } + // Try to send HTML e-mail using sendmail + def sendmail_tf = new File(workflow.launchDir.toString(), ".sendmail_tmp.html") + sendmail_tf.withWriter { w -> w << sendmail_html } + [ 'sendmail', '-t' ].execute() << sendmail_html + log.info "-${colors.purple}[$workflow.manifest.name]${colors.green} Sent summary e-mail to $email_address (sendmail)-" + } catch (all) { + // Catch failures and try with plaintext + def mail_cmd = [ 'mail', '-s', subject, '--content-type=text/html', email_address ] + mail_cmd.execute() << email_html + log.info "-${colors.purple}[$workflow.manifest.name]${colors.green} Sent summary e-mail to $email_address (mail)-" + } + } + + // Write summary e-mail HTML to a file + def output_hf = new File(workflow.launchDir.toString(), ".pipeline_report.html") + output_hf.withWriter { w -> w << email_html } + FilesEx.copyTo(output_hf.toPath(), "${outdir}/pipeline_info/pipeline_report.html"); + output_hf.delete() + + // Write summary e-mail TXT to a file + def output_tf = new File(workflow.launchDir.toString(), ".pipeline_report.txt") + output_tf.withWriter { w -> w << email_txt } + FilesEx.copyTo(output_tf.toPath(), "${outdir}/pipeline_info/pipeline_report.txt"); + output_tf.delete() +} + +// +// Print pipeline summary on completion +// +def completionSummary(monochrome_logs=true) { + Map colors = logColours(monochrome_logs) + if (workflow.success) { + if (workflow.stats.ignoredCount == 0) { + log.info "-${colors.purple}[$workflow.manifest.name]${colors.green} Pipeline completed successfully${colors.reset}-" + } else { + log.info "-${colors.purple}[$workflow.manifest.name]${colors.yellow} Pipeline completed successfully, but with errored process(es) ${colors.reset}-" + } + } else { + log.info "-${colors.purple}[$workflow.manifest.name]${colors.red} Pipeline completed with errors${colors.reset}-" + } +} + +// +// Construct and send a notification to a web server as JSON e.g. 
Microsoft Teams and Slack +// +def imNotification(summary_params, hook_url) { + def summary = [:] + for (group in summary_params.keySet()) { + summary << summary_params[group] + } + + def misc_fields = [:] + misc_fields['start'] = workflow.start + misc_fields['complete'] = workflow.complete + misc_fields['scriptfile'] = workflow.scriptFile + misc_fields['scriptid'] = workflow.scriptId + if (workflow.repository) misc_fields['repository'] = workflow.repository + if (workflow.commitId) misc_fields['commitid'] = workflow.commitId + if (workflow.revision) misc_fields['revision'] = workflow.revision + misc_fields['nxf_version'] = workflow.nextflow.version + misc_fields['nxf_build'] = workflow.nextflow.build + misc_fields['nxf_timestamp'] = workflow.nextflow.timestamp + + def msg_fields = [:] + msg_fields['version'] = getWorkflowVersion() + msg_fields['runName'] = workflow.runName + msg_fields['success'] = workflow.success + msg_fields['dateComplete'] = workflow.complete + msg_fields['duration'] = workflow.duration + msg_fields['exitStatus'] = workflow.exitStatus + msg_fields['errorMessage'] = (workflow.errorMessage ?: 'None') + msg_fields['errorReport'] = (workflow.errorReport ?: 'None') + msg_fields['commandLine'] = workflow.commandLine.replaceFirst(/ +--hook_url +[^ ]+/, "") + msg_fields['projectDir'] = workflow.projectDir + msg_fields['summary'] = summary << misc_fields + + // Render the JSON template + def engine = new groovy.text.GStringTemplateEngine() + // Different JSON depending on the service provider + // Defaults to "Adaptive Cards" (https://adaptivecards.io), except Slack which has its own format + def json_path = hook_url.contains("hooks.slack.com") ? "slackreport.json" : "adaptivecard.json" + def hf = new File("${workflow.projectDir}/assets/${json_path}") + def json_template = engine.createTemplate(hf).make(msg_fields) + def json_message = json_template.toString() + + // POST + def post = new URL(hook_url).openConnection(); + post.setRequestMethod("POST") + post.setDoOutput(true) + post.setRequestProperty("Content-Type", "application/json") + post.getOutputStream().write(json_message.getBytes("UTF-8")); + def postRC = post.getResponseCode(); + if (! 
postRC.equals(200)) { + log.warn(post.getErrorStream().getText()); + } +} diff --git a/subworkflows/nf-core/utils_nfcore_pipeline/meta.yml b/subworkflows/nf-core/utils_nfcore_pipeline/meta.yml new file mode 100644 index 00000000..d08d2434 --- /dev/null +++ b/subworkflows/nf-core/utils_nfcore_pipeline/meta.yml @@ -0,0 +1,24 @@ +# yaml-language-server: $schema=https://raw.githubusercontent.com/nf-core/modules/master/subworkflows/yaml-schema.json +name: "UTILS_NFCORE_PIPELINE" +description: Subworkflow with utility functions specific to the nf-core pipeline template +keywords: + - utility + - pipeline + - initialise + - version +components: [] +input: + - nextflow_cli_args: + type: list + description: | + Nextflow CLI positional arguments +output: + - success: + type: boolean + description: | + Dummy output to indicate success +authors: + - "@adamrtalbot" +maintainers: + - "@adamrtalbot" + - "@maxulysse" diff --git a/subworkflows/nf-core/utils_nfcore_pipeline/tests/main.function.nf.test b/subworkflows/nf-core/utils_nfcore_pipeline/tests/main.function.nf.test new file mode 100644 index 00000000..1dc317f8 --- /dev/null +++ b/subworkflows/nf-core/utils_nfcore_pipeline/tests/main.function.nf.test @@ -0,0 +1,134 @@ + +nextflow_function { + + name "Test Functions" + script "../main.nf" + config "subworkflows/nf-core/utils_nfcore_pipeline/tests/nextflow.config" + tag "subworkflows" + tag "subworkflows_nfcore" + tag "utils_nfcore_pipeline" + tag "subworkflows/utils_nfcore_pipeline" + + test("Test Function checkConfigProvided") { + + function "checkConfigProvided" + + then { + assertAll( + { assert function.success }, + { assert snapshot(function.result).match() } + ) + } + } + + test("Test Function checkProfileProvided") { + + function "checkProfileProvided" + + when { + function { + """ + input[0] = [] + """ + } + } + + then { + assertAll( + { assert function.success }, + { assert snapshot(function.result).match() } + ) + } + } + + test("Test Function workflowCitation") { + + function "workflowCitation" + + then { + assertAll( + { assert function.success }, + { assert snapshot(function.result).match() } + ) + } + } + + test("Test Function nfCoreLogo") { + + function "nfCoreLogo" + + when { + function { + """ + input[0] = false + """ + } + } + + then { + assertAll( + { assert function.success }, + { assert snapshot(function.result).match() } + ) + } + } + + test("Test Function dashedLine") { + + function "dashedLine" + + when { + function { + """ + input[0] = false + """ + } + } + + then { + assertAll( + { assert function.success }, + { assert snapshot(function.result).match() } + ) + } + } + + test("Test Function without logColours") { + + function "logColours" + + when { + function { + """ + input[0] = true + """ + } + } + + then { + assertAll( + { assert function.success }, + { assert snapshot(function.result).match() } + ) + } + } + + test("Test Function with logColours") { + function "logColours" + + when { + function { + """ + input[0] = false + """ + } + } + + then { + assertAll( + { assert function.success }, + { assert snapshot(function.result).match() } + ) + } + } +} diff --git a/subworkflows/nf-core/utils_nfcore_pipeline/tests/main.function.nf.test.snap b/subworkflows/nf-core/utils_nfcore_pipeline/tests/main.function.nf.test.snap new file mode 100644 index 00000000..10f948e6 --- /dev/null +++ b/subworkflows/nf-core/utils_nfcore_pipeline/tests/main.function.nf.test.snap @@ -0,0 +1,138 @@ +{ + "Test Function checkProfileProvided": { + "content": null, + "timestamp": 
"2024-02-09T15:43:55.145717" + }, + "Test Function checkConfigProvided": { + "content": [ + true + ], + "timestamp": "2024-01-19T11:34:13.548431224" + }, + "Test Function nfCoreLogo": { + "content": [ + "\n\n-\u001b[2m----------------------------------------------------\u001b[0m-\n \u001b[0;32m,--.\u001b[0;30m/\u001b[0;32m,-.\u001b[0m\n\u001b[0;34m ___ __ __ __ ___ \u001b[0;32m/,-._.--~'\u001b[0m\n\u001b[0;34m |\\ | |__ __ / ` / \\ |__) |__ \u001b[0;33m} {\u001b[0m\n\u001b[0;34m | \\| | \\__, \\__/ | \\ |___ \u001b[0;32m\\`-._,-`-,\u001b[0m\n \u001b[0;32m`._,._,'\u001b[0m\n\u001b[0;35m nextflow_workflow v9.9.9\u001b[0m\n-\u001b[2m----------------------------------------------------\u001b[0m-\n" + ], + "timestamp": "2024-01-19T11:34:38.840454873" + }, + "Test Function workflowCitation": { + "content": [ + "If you use nextflow_workflow for your analysis please cite:\n\n* The pipeline\n https://doi.org/10.5281/zenodo.5070524\n\n* The nf-core framework\n https://doi.org/10.1038/s41587-020-0439-x\n\n* Software dependencies\n https://github.com/nextflow_workflow/blob/master/CITATIONS.md" + ], + "timestamp": "2024-01-19T11:34:22.24352016" + }, + "Test Function without logColours": { + "content": [ + { + "reset": "", + "bold": "", + "dim": "", + "underlined": "", + "blink": "", + "reverse": "", + "hidden": "", + "black": "", + "red": "", + "green": "", + "yellow": "", + "blue": "", + "purple": "", + "cyan": "", + "white": "", + "bblack": "", + "bred": "", + "bgreen": "", + "byellow": "", + "bblue": "", + "bpurple": "", + "bcyan": "", + "bwhite": "", + "ublack": "", + "ured": "", + "ugreen": "", + "uyellow": "", + "ublue": "", + "upurple": "", + "ucyan": "", + "uwhite": "", + "iblack": "", + "ired": "", + "igreen": "", + "iyellow": "", + "iblue": "", + "ipurple": "", + "icyan": "", + "iwhite": "", + "biblack": "", + "bired": "", + "bigreen": "", + "biyellow": "", + "biblue": "", + "bipurple": "", + "bicyan": "", + "biwhite": "" + } + ], + "timestamp": "2024-01-19T11:35:04.418416984" + }, + "Test Function dashedLine": { + "content": [ + "-\u001b[2m----------------------------------------------------\u001b[0m-" + ], + "timestamp": "2024-01-19T11:34:55.420000755" + }, + "Test Function with logColours": { + "content": [ + { + "reset": "\u001b[0m", + "bold": "\u001b[1m", + "dim": "\u001b[2m", + "underlined": "\u001b[4m", + "blink": "\u001b[5m", + "reverse": "\u001b[7m", + "hidden": "\u001b[8m", + "black": "\u001b[0;30m", + "red": "\u001b[0;31m", + "green": "\u001b[0;32m", + "yellow": "\u001b[0;33m", + "blue": "\u001b[0;34m", + "purple": "\u001b[0;35m", + "cyan": "\u001b[0;36m", + "white": "\u001b[0;37m", + "bblack": "\u001b[1;30m", + "bred": "\u001b[1;31m", + "bgreen": "\u001b[1;32m", + "byellow": "\u001b[1;33m", + "bblue": "\u001b[1;34m", + "bpurple": "\u001b[1;35m", + "bcyan": "\u001b[1;36m", + "bwhite": "\u001b[1;37m", + "ublack": "\u001b[4;30m", + "ured": "\u001b[4;31m", + "ugreen": "\u001b[4;32m", + "uyellow": "\u001b[4;33m", + "ublue": "\u001b[4;34m", + "upurple": "\u001b[4;35m", + "ucyan": "\u001b[4;36m", + "uwhite": "\u001b[4;37m", + "iblack": "\u001b[0;90m", + "ired": "\u001b[0;91m", + "igreen": "\u001b[0;92m", + "iyellow": "\u001b[0;93m", + "iblue": "\u001b[0;94m", + "ipurple": "\u001b[0;95m", + "icyan": "\u001b[0;96m", + "iwhite": "\u001b[0;97m", + "biblack": "\u001b[1;90m", + "bired": "\u001b[1;91m", + "bigreen": "\u001b[1;92m", + "biyellow": "\u001b[1;93m", + "biblue": "\u001b[1;94m", + "bipurple": "\u001b[1;95m", + "bicyan": "\u001b[1;96m", + "biwhite": "\u001b[1;97m" + } + ], + 
"timestamp": "2024-01-19T11:35:13.436366565" + } +} \ No newline at end of file diff --git a/subworkflows/nf-core/utils_nfcore_pipeline/tests/main.workflow.nf.test b/subworkflows/nf-core/utils_nfcore_pipeline/tests/main.workflow.nf.test new file mode 100644 index 00000000..8940d32d --- /dev/null +++ b/subworkflows/nf-core/utils_nfcore_pipeline/tests/main.workflow.nf.test @@ -0,0 +1,29 @@ +nextflow_workflow { + + name "Test Workflow UTILS_NFCORE_PIPELINE" + script "../main.nf" + config "subworkflows/nf-core/utils_nfcore_pipeline/tests/nextflow.config" + workflow "UTILS_NFCORE_PIPELINE" + tag "subworkflows" + tag "subworkflows_nfcore" + tag "utils_nfcore_pipeline" + tag "subworkflows/utils_nfcore_pipeline" + + test("Should run without failures") { + + when { + workflow { + """ + input[0] = [] + """ + } + } + + then { + assertAll( + { assert workflow.success }, + { assert snapshot(workflow.out).match() } + ) + } + } +} diff --git a/subworkflows/nf-core/utils_nfcore_pipeline/tests/main.workflow.nf.test.snap b/subworkflows/nf-core/utils_nfcore_pipeline/tests/main.workflow.nf.test.snap new file mode 100644 index 00000000..d07ce54c --- /dev/null +++ b/subworkflows/nf-core/utils_nfcore_pipeline/tests/main.workflow.nf.test.snap @@ -0,0 +1,15 @@ +{ + "Should run without failures": { + "content": [ + { + "0": [ + true + ], + "valid_config": [ + true + ] + } + ], + "timestamp": "2024-01-19T11:35:22.538940073" + } +} \ No newline at end of file diff --git a/subworkflows/nf-core/utils_nfcore_pipeline/tests/tags.yml b/subworkflows/nf-core/utils_nfcore_pipeline/tests/tags.yml new file mode 100644 index 00000000..ac8523c9 --- /dev/null +++ b/subworkflows/nf-core/utils_nfcore_pipeline/tests/tags.yml @@ -0,0 +1,2 @@ +subworkflows/utils_nfcore_pipeline: + - subworkflows/nf-core/utils_nfcore_pipeline/** diff --git a/subworkflows/nf-core/utils_nfvalidation_plugin/main.nf b/subworkflows/nf-core/utils_nfvalidation_plugin/main.nf new file mode 100644 index 00000000..2585b65d --- /dev/null +++ b/subworkflows/nf-core/utils_nfvalidation_plugin/main.nf @@ -0,0 +1,62 @@ +// +// Subworkflow that uses the nf-validation plugin to render help text and parameter summary +// + +/* +======================================================================================== + IMPORT NF-VALIDATION PLUGIN +======================================================================================== +*/ + +include { paramsHelp } from 'plugin/nf-validation' +include { paramsSummaryLog } from 'plugin/nf-validation' +include { validateParameters } from 'plugin/nf-validation' + +/* +======================================================================================== + SUBWORKFLOW DEFINITION +======================================================================================== +*/ + +workflow UTILS_NFVALIDATION_PLUGIN { + + take: + print_help // boolean: print help + workflow_command // string: default commmand used to run pipeline + pre_help_text // string: string to be printed before help text and summary log + post_help_text // string: string to be printed after help text and summary log + validate_params // boolean: validate parameters + schema_filename // path: JSON schema file, null to use default value + + main: + + log.debug "Using schema file: ${schema_filename}" + + // Default values for strings + pre_help_text = pre_help_text ?: '' + post_help_text = post_help_text ?: '' + workflow_command = workflow_command ?: '' + + // + // Print help message if needed + // + if (print_help) { + log.info pre_help_text + 
paramsHelp(workflow_command, parameters_schema: schema_filename) + post_help_text + System.exit(0) + } + + // + // Print parameter summary to stdout + // + log.info pre_help_text + paramsSummaryLog(workflow, parameters_schema: schema_filename) + post_help_text + + // + // Validate parameters relative to the parameter JSON schema + // + if (validate_params){ + validateParameters(parameters_schema: schema_filename) + } + + emit: + dummy_emit = true +} diff --git a/subworkflows/nf-core/utils_nfvalidation_plugin/meta.yml b/subworkflows/nf-core/utils_nfvalidation_plugin/meta.yml new file mode 100644 index 00000000..3d4a6b04 --- /dev/null +++ b/subworkflows/nf-core/utils_nfvalidation_plugin/meta.yml @@ -0,0 +1,44 @@ +# yaml-language-server: $schema=https://raw.githubusercontent.com/nf-core/modules/master/subworkflows/yaml-schema.json +name: "UTILS_NFVALIDATION_PLUGIN" +description: Use nf-validation to initiate and validate a pipeline +keywords: + - utility + - pipeline + - initialise + - validation +components: [] +input: + - print_help: + type: boolean + description: | + Print help message and exit + - workflow_command: + type: string + description: | + The command to run the workflow e.g. "nextflow run main.nf" + - pre_help_text: + type: string + description: | + Text to print before the help message + - post_help_text: + type: string + description: | + Text to print after the help message + - validate_params: + type: boolean + description: | + Validate the parameters and error if invalid. + - schema_filename: + type: string + description: | + The filename of the schema to validate against. +output: + - dummy_emit: + type: boolean + description: | + Dummy emit to make nf-core subworkflows lint happy +authors: + - "@adamrtalbot" +maintainers: + - "@adamrtalbot" + - "@maxulysse" diff --git a/subworkflows/nf-core/utils_nfvalidation_plugin/tests/main.nf.test b/subworkflows/nf-core/utils_nfvalidation_plugin/tests/main.nf.test new file mode 100644 index 00000000..517ee54e --- /dev/null +++ b/subworkflows/nf-core/utils_nfvalidation_plugin/tests/main.nf.test @@ -0,0 +1,200 @@ +nextflow_workflow { + + name "Test Workflow UTILS_NFVALIDATION_PLUGIN" + script "../main.nf" + workflow "UTILS_NFVALIDATION_PLUGIN" + tag "subworkflows" + tag "subworkflows_nfcore" + tag "plugin/nf-validation" + tag "'plugin/nf-validation'" + tag "utils_nfvalidation_plugin" + tag "subworkflows/utils_nfvalidation_plugin" + + test("Should run nothing") { + + when { + + params { + monochrome_logs = true + test_data = '' + } + + workflow { + """ + help = false + workflow_command = null + pre_help_text = null + post_help_text = null + validate_params = false + schema_filename = "$moduleTestDir/nextflow_schema.json" + + input[0] = help + input[1] = workflow_command + input[2] = pre_help_text + input[3] = post_help_text + input[4] = validate_params + input[5] = schema_filename + """ + } + } + + then { + assertAll( + { assert workflow.success } + ) + } + } + + test("Should run help") { + + + when { + + params { + monochrome_logs = true + test_data = '' + } + workflow { + """ + help = true + workflow_command = null + pre_help_text = null + post_help_text = null + validate_params = false + schema_filename = "$moduleTestDir/nextflow_schema.json" + + input[0] = help + input[1] = workflow_command + input[2] = pre_help_text + input[3] = post_help_text + input[4] = validate_params + input[5] = schema_filename + """ + } + } + + then { + assertAll( + { assert workflow.success }, + { assert workflow.exitStatus == 0 }, + { assert 
workflow.stdout.any { it.contains('Input/output options') } }, + { assert workflow.stdout.any { it.contains('--outdir') } } + ) + } + } + + test("Should run help with command") { + + when { + + params { + monochrome_logs = true + test_data = '' + } + workflow { + """ + help = true + workflow_command = "nextflow run noorg/doesntexist" + pre_help_text = null + post_help_text = null + validate_params = false + schema_filename = "$moduleTestDir/nextflow_schema.json" + + input[0] = help + input[1] = workflow_command + input[2] = pre_help_text + input[3] = post_help_text + input[4] = validate_params + input[5] = schema_filename + """ + } + } + + then { + assertAll( + { assert workflow.success }, + { assert workflow.exitStatus == 0 }, + { assert workflow.stdout.any { it.contains('nextflow run noorg/doesntexist') } }, + { assert workflow.stdout.any { it.contains('Input/output options') } }, + { assert workflow.stdout.any { it.contains('--outdir') } } + ) + } + } + + test("Should run help with extra text") { + + + when { + + params { + monochrome_logs = true + test_data = '' + } + workflow { + """ + help = true + workflow_command = "nextflow run noorg/doesntexist" + pre_help_text = "pre-help-text" + post_help_text = "post-help-text" + validate_params = false + schema_filename = "$moduleTestDir/nextflow_schema.json" + + input[0] = help + input[1] = workflow_command + input[2] = pre_help_text + input[3] = post_help_text + input[4] = validate_params + input[5] = schema_filename + """ + } + } + + then { + assertAll( + { assert workflow.success }, + { assert workflow.exitStatus == 0 }, + { assert workflow.stdout.any { it.contains('pre-help-text') } }, + { assert workflow.stdout.any { it.contains('nextflow run noorg/doesntexist') } }, + { assert workflow.stdout.any { it.contains('Input/output options') } }, + { assert workflow.stdout.any { it.contains('--outdir') } }, + { assert workflow.stdout.any { it.contains('post-help-text') } } + ) + } + } + + test("Should validate params") { + + when { + + params { + monochrome_logs = true + test_data = '' + outdir = 1 + } + workflow { + """ + help = false + workflow_command = null + pre_help_text = null + post_help_text = null + validate_params = true + schema_filename = "$moduleTestDir/nextflow_schema.json" + + input[0] = help + input[1] = workflow_command + input[2] = pre_help_text + input[3] = post_help_text + input[4] = validate_params + input[5] = schema_filename + """ + } + } + + then { + assertAll( + { assert workflow.failed }, + { assert workflow.stdout.any { it.contains('ERROR ~ ERROR: Validation of pipeline parameters failed!') } } + ) + } + } +} \ No newline at end of file diff --git a/subworkflows/nf-core/utils_nfvalidation_plugin/tests/nextflow_schema.json b/subworkflows/nf-core/utils_nfvalidation_plugin/tests/nextflow_schema.json new file mode 100644 index 00000000..7626c1c9 --- /dev/null +++ b/subworkflows/nf-core/utils_nfvalidation_plugin/tests/nextflow_schema.json @@ -0,0 +1,96 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema", + "$id": "https://raw.githubusercontent.com/./master/nextflow_schema.json", + "title": ". 
pipeline parameters", + "description": "", + "type": "object", + "definitions": { + "input_output_options": { + "title": "Input/output options", + "type": "object", + "fa_icon": "fas fa-terminal", + "description": "Define where the pipeline should find input data and save output data.", + "required": ["outdir"], + "properties": { + "validate_params": { + "type": "boolean", + "description": "Validate parameters?", + "default": true, + "hidden": true + }, + "outdir": { + "type": "string", + "format": "directory-path", + "description": "The output directory where the results will be saved. You have to use absolute paths to storage on Cloud infrastructure.", + "fa_icon": "fas fa-folder-open" + }, + "test_data_base": { + "type": "string", + "default": "https://raw.githubusercontent.com/nf-core/test-datasets/modules", + "description": "Base for test data directory", + "hidden": true + }, + "test_data": { + "type": "string", + "description": "Fake test data param", + "hidden": true + } + } + }, + "generic_options": { + "title": "Generic options", + "type": "object", + "fa_icon": "fas fa-file-import", + "description": "Less common options for the pipeline, typically set in a config file.", + "help_text": "These options are common to all nf-core pipelines and allow you to customise some of the core preferences for how the pipeline runs.\n\nTypically these options would be set in a Nextflow config file loaded for all pipeline runs, such as `~/.nextflow/config`.", + "properties": { + "help": { + "type": "boolean", + "description": "Display help text.", + "fa_icon": "fas fa-question-circle", + "hidden": true + }, + "version": { + "type": "boolean", + "description": "Display version and exit.", + "fa_icon": "fas fa-question-circle", + "hidden": true + }, + "logo": { + "type": "boolean", + "default": true, + "description": "Display nf-core logo in console output.", + "fa_icon": "fas fa-image", + "hidden": true + }, + "singularity_pull_docker_container": { + "type": "boolean", + "description": "Pull Singularity container from Docker?", + "hidden": true + }, + "publish_dir_mode": { + "type": "string", + "default": "copy", + "description": "Method used to save pipeline results to output directory.", + "help_text": "The Nextflow `publishDir` option specifies which intermediate files should be saved to the output directory. This option tells the pipeline what method should be used to move these files. 
See [Nextflow docs](https://www.nextflow.io/docs/latest/process.html#publishdir) for details.", + "fa_icon": "fas fa-copy", + "enum": ["symlink", "rellink", "link", "copy", "copyNoFollow", "move"], + "hidden": true + }, + "monochrome_logs": { + "type": "boolean", + "description": "Use monochrome_logs", + "hidden": true + } + } + } + }, + "allOf": [ + { + "$ref": "#/definitions/input_output_options" + }, + { + "$ref": "#/definitions/generic_options" + } + ] +} diff --git a/subworkflows/nf-core/utils_nfvalidation_plugin/tests/tags.yml b/subworkflows/nf-core/utils_nfvalidation_plugin/tests/tags.yml new file mode 100644 index 00000000..60b1cfff --- /dev/null +++ b/subworkflows/nf-core/utils_nfvalidation_plugin/tests/tags.yml @@ -0,0 +1,2 @@ +subworkflows/utils_nfvalidation_plugin: + - subworkflows/nf-core/utils_nfvalidation_plugin/** From 8b19f9ae6f95e5966760e43fbd4ccc89362659cf Mon Sep 17 00:00:00 2001 From: JoseEspinosa Date: Thu, 29 Feb 2024 16:04:29 +0100 Subject: [PATCH 09/20] Update missing subworkflows --- .../utils_nfcore_proteinfold_pipeline/main.nf | 204 ++++++++++++++++++ .../tests/nextflow.config | 9 + .../tests/nextflow.config | 9 + 3 files changed, 222 insertions(+) create mode 100644 subworkflows/local/utils_nfcore_proteinfold_pipeline/main.nf create mode 100644 subworkflows/nf-core/utils_nextflow_pipeline/tests/nextflow.config create mode 100644 subworkflows/nf-core/utils_nfcore_pipeline/tests/nextflow.config diff --git a/subworkflows/local/utils_nfcore_proteinfold_pipeline/main.nf b/subworkflows/local/utils_nfcore_proteinfold_pipeline/main.nf new file mode 100644 index 00000000..e605955a --- /dev/null +++ b/subworkflows/local/utils_nfcore_proteinfold_pipeline/main.nf @@ -0,0 +1,204 @@ +// +// Subworkflow with functionality specific to the nf-core/pipeline pipeline +// + +import groovy.json.JsonSlurper + +/* +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + IMPORT FUNCTIONS / MODULES / SUBWORKFLOWS +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +*/ + +include { UTILS_NFVALIDATION_PLUGIN } from '../../nf-core/utils_nfvalidation_plugin' +include { paramsSummaryMap } from 'plugin/nf-validation' +include { UTILS_NEXTFLOW_PIPELINE } from '../../nf-core/utils_nextflow_pipeline' +include { completionEmail } from '../../nf-core/utils_nfcore_pipeline' +include { completionSummary } from '../../nf-core/utils_nfcore_pipeline' +include { dashedLine } from '../../nf-core/utils_nfcore_pipeline' +include { nfCoreLogo } from '../../nf-core/utils_nfcore_pipeline' +include { imNotification } from '../../nf-core/utils_nfcore_pipeline' +include { UTILS_NFCORE_PIPELINE } from '../../nf-core/utils_nfcore_pipeline' +include { workflowCitation } from '../../nf-core/utils_nfcore_pipeline' + +/* +======================================================================================== + SUBWORKFLOW TO INITIALISE PIPELINE +======================================================================================== +*/ + +workflow PIPELINE_INITIALISATION { + + take: + version // boolean: Display version and exit + help // boolean: Display help text + validate_params // boolean: Boolean whether to validate parameters against the schema at runtime + monochrome_logs // boolean: Do not use coloured log outputs + nextflow_cli_args // array: List of positional nextflow CLI args + outdir // string: The output directory where the results will be saved + + main: + // + // Print version and exit if required and dump pipeline parameters 
to JSON file + // + UTILS_NEXTFLOW_PIPELINE ( + version, + true, + outdir, + workflow.profile.tokenize(',').intersect(['conda', 'mamba']).size() >= 1 + ) + + // + // Validate parameters and generate parameter summary to stdout + // + pre_help_text = nfCoreLogo(monochrome_logs) + post_help_text = '\n' + workflowCitation() + '\n' + dashedLine(monochrome_logs) + def String workflow_command = "nextflow run ${workflow.manifest.name} -profile --input samplesheet.csv --genome GRCh37 --outdir " + UTILS_NFVALIDATION_PLUGIN ( + help, + workflow_command, + pre_help_text, + post_help_text, + validate_params, + "nextflow_schema.json" + ) + +// + // Check config provided to the pipeline + // + UTILS_NFCORE_PIPELINE ( + nextflow_cli_args + ) + + // // TODO: remove + // // Custom validation for pipeline parameters + // // + // validateInputParameters() + +} + +/* +======================================================================================== + SUBWORKFLOW FOR PIPELINE COMPLETION +======================================================================================== +*/ + +workflow PIPELINE_COMPLETION { + + take: + email // string: email address + email_on_fail // string: email address sent on pipeline failure + plaintext_email // boolean: Send plain-text email instead of HTML + outdir // path: Path to output directory where results will be published + monochrome_logs // boolean: Disable ANSI colour codes in log output + hook_url // string: hook URL for notifications + multiqc_report // string: Path to MultiQC report + + main: + + summary_params = paramsSummaryMap(workflow, parameters_schema: "nextflow_schema.json") + + // + // Completion email and summary + // + workflow.onComplete { + if (email || email_on_fail) { + completionEmail(summary_params, email, email_on_fail, plaintext_email, outdir, monochrome_logs, multiqc_report.toList()) + } + + completionSummary(monochrome_logs) + + if (hook_url) { + imNotification(summary_params, hook_url) + } + } +} + +/* +======================================================================================== + FUNCTIONS +======================================================================================== +*/ + +// +// Get link to Colabfold Alphafold2 parameters +// +def getColabfoldAlphafold2Params() { + def link = null + if (params.colabfold_alphafold2_params_tags) { + if (params.colabfold_alphafold2_params_tags.containsKey(params.colabfold_model_preset.toString())) { + link = "https://storage.googleapis.com/alphafold/" + params.colabfold_alphafold2_params_tags[ params.colabfold_model_preset.toString() ] + '.tar' + } + } + return link +} + +// +// Get path to Colabfold Alphafold2 parameters +// +def getColabfoldAlphafold2ParamsPath() { + def path = null + println ("params.colabfold_alphafold2_params_tags...................${params.colabfold_alphafold2_params_tags}") + params.colabfold_model_preset.toString() + if (params.colabfold_alphafold2_params_tags) { + if (params.colabfold_alphafold2_params_tags.containsKey(params.colabfold_model_preset.toString())) { + path = "${params.colabfold_db}/params/" + params.colabfold_alphafold2_params_tags[ params.colabfold_model_preset.toString() ] + println ("path...................${path}") + } + } + return path +} + +// +// Generate methods description for MultiQC +// +def toolCitationText() { + // TODO nf-core: Optionally add in-text citation tools to this list. + // Can use ternary operators to dynamically construct based conditions, e.g. params["run_xyz"] ? "Tool (Foo et al. 
2023)" : "", + // Uncomment function in methodsDescriptionText to render in MultiQC report + def citation_text = [ + "Tools used in the workflow included:", + "FastQC (Andrews 2010),", + "MultiQC (Ewels et al. 2016)", + "." + ].join(' ').trim() + + return citation_text +} + +def toolBibliographyText() { + // TODO nf-core: Optionally add bibliographic entries to this list. + // Can use ternary operators to dynamically construct based conditions, e.g. params["run_xyz"] ? "
<li>Author (2023) Pub name, Journal, DOI</li>" : "",
+ // Uncomment function in methodsDescriptionText to render in MultiQC report
+ def reference_text = [
+ "<li>Andrews S, (2010) FastQC, URL: https://www.bioinformatics.babraham.ac.uk/projects/fastqc/).</li>",
+ "<li>Ewels, P., Magnusson, M., Lundin, S., & Käller, M. (2016). MultiQC: summarize analysis results for multiple tools and samples in a single report. Bioinformatics , 32(19), 3047–3048. doi: /10.1093/bioinformatics/btw354</li>"
+ ].join(' ').trim()
+
+ return reference_text
+}
+
+def methodsDescriptionText(mqc_methods_yaml) {
+ // Convert to a named map so can be used as with familar NXF ${workflow} variable syntax in the MultiQC YML file
+ def meta = [:]
+ meta.workflow = workflow.toMap()
+ meta["manifest_map"] = workflow.manifest.toMap()
+
+ // Pipeline DOI
+ meta["doi_text"] = meta.manifest_map.doi ? "(doi: ${meta.manifest_map.doi})" : ""
+ meta["nodoi_text"] = meta.manifest_map.doi ? "": "
  • If available, make sure to update the text to include the Zenodo DOI of version of the pipeline used.
  • " + + // Tool references + meta["tool_citations"] = "" + meta["tool_bibliography"] = "" + + // TODO nf-core: Only uncomment below if logic in toolCitationText/toolBibliographyText has been filled! + // meta["tool_citations"] = toolCitationText().replaceAll(", \\.", ".").replaceAll("\\. \\.", ".").replaceAll(", \\.", ".") + // meta["tool_bibliography"] = toolBibliographyText() + def methods_text = mqc_methods_yaml.text + + def engine = new groovy.text.SimpleTemplateEngine() + def description_html = engine.createTemplate(methods_text).make(meta) + + return description_html.toString() +} \ No newline at end of file diff --git a/subworkflows/nf-core/utils_nextflow_pipeline/tests/nextflow.config b/subworkflows/nf-core/utils_nextflow_pipeline/tests/nextflow.config new file mode 100644 index 00000000..53574ffe --- /dev/null +++ b/subworkflows/nf-core/utils_nextflow_pipeline/tests/nextflow.config @@ -0,0 +1,9 @@ +manifest { + name = 'nextflow_workflow' + author = """nf-core""" + homePage = 'https://127.0.0.1' + description = """Dummy pipeline""" + nextflowVersion = '!>=23.04.0' + version = '9.9.9' + doi = 'https://doi.org/10.5281/zenodo.5070524' +} \ No newline at end of file diff --git a/subworkflows/nf-core/utils_nfcore_pipeline/tests/nextflow.config b/subworkflows/nf-core/utils_nfcore_pipeline/tests/nextflow.config new file mode 100644 index 00000000..d0a926bf --- /dev/null +++ b/subworkflows/nf-core/utils_nfcore_pipeline/tests/nextflow.config @@ -0,0 +1,9 @@ +manifest { + name = 'nextflow_workflow' + author = """nf-core""" + homePage = 'https://127.0.0.1' + description = """Dummy pipeline""" + nextflowVersion = '!>=23.04.0' + version = '9.9.9' + doi = 'https://doi.org/10.5281/zenodo.5070524' +} From e18446046b81c85ccc5dc102103ae0b0ba417d3d Mon Sep 17 00:00:00 2001 From: JoseEspinosa Date: Thu, 29 Feb 2024 16:05:07 +0100 Subject: [PATCH 10/20] Add take to prepare subworkflow --- subworkflows/local/prepare_alphafold2_dbs.nf | 69 ++++++++++++-------- subworkflows/local/prepare_colabfold_dbs.nf | 30 ++++++--- subworkflows/local/prepare_esmfold_dbs.nf | 3 +- 3 files changed, 66 insertions(+), 36 deletions(-) diff --git a/subworkflows/local/prepare_alphafold2_dbs.nf b/subworkflows/local/prepare_alphafold2_dbs.nf index c085d6f4..d3efffbf 100644 --- a/subworkflows/local/prepare_alphafold2_dbs.nf +++ b/subworkflows/local/prepare_alphafold2_dbs.nf @@ -18,98 +18,115 @@ include { COMBINE_UNIPROT } from '../../modules/local/combine_uniprot' include { DOWNLOAD_PDBMMCIF } from '../../modules/local/download_pdbmmcif' workflow PREPARE_ALPHAFOLD2_DBS { + + take: + alphafold2_db // directory: path to alphafold2 DBs + full_dbs // boolean: Use full databases (otherwise reduced version) + bfd_path // directory: /path/to/bfd/ + small_bfd_path // directory: /path/to/small_bfd/ + alphafold2_params_path // directory: /path/to/alphafold2/params/ + mgnify_path // directory: /path/to/mgnify/ + pdb70_path // directory: /path/to/pdb70/ + pdb_mmcif_path // directory: /path/to/pdb_mmcif/ + uniref30_alphafold2_path // directory: /path/to/uniref30/alphafold2/ + uniref90_path // directory: /path/to/uniref90/ + pdb_seqres_path // directory: /path/to/pdb_seqres/ + uniprot_path // directory: /path/to/uniprot/ + uniprot_sprot // string: Specifies the link to download uniprot_sprot + uniprot_trembl + main: ch_bfd = Channel.empty() ch_small_bfd = Channel.empty() ch_versions = Channel.empty() - if (params.alphafold2_db) { - if (params.full_dbs) { - ch_bfd = file( params.bfd_path ) + if (alphafold2_db) { + if (full_dbs) { + ch_bfd = 
file( bfd_path ) ch_small_bfd = file( "${projectDir}/assets/dummy_db" ) } else { ch_bfd = file( "${projectDir}/assets/dummy_db" ) - ch_small_bfd = file( params.small_bfd_path ) + ch_small_bfd = file( small_bfd_path ) } - ch_params = file( params.alphafold2_params_path ) - ch_mgnify = file( params.mgnify_path ) - ch_pdb70 = file( params.pdb70_path, type: 'dir' ) - ch_mmcif_files = file( params.pdb_mmcif_path, type: 'dir' ) - ch_mmcif_obsolete = file( params.pdb_mmcif_path, type: 'file' ) + ch_params = file( alphafold2_params_path ) + ch_mgnify = file( mgnify_path ) + ch_pdb70 = file( pdb70_path, type: 'dir' ) + ch_mmcif_files = file( pdb_mmcif_path, type: 'dir' ) + ch_mmcif_obsolete = file( pdb_mmcif_path, type: 'file' ) ch_mmcif = ch_mmcif_files + ch_mmcif_obsolete - ch_uniref30 = file( params.uniref30_alphafold2_path, type: 'any' ) - ch_uniref90 = file( params.uniref90_path ) - ch_pdb_seqres = file( params.pdb_seqres_path ) - ch_uniprot = file( params.uniprot_path ) + ch_uniref30 = file( uniref30_alphafold2_path, type: 'any' ) + ch_uniref90 = file( uniref90_path ) + ch_pdb_seqres = file( pdb_seqres_path ) + ch_uniprot = file( uniprot_path ) } else { - if (params.full_dbs) { + if (full_dbs) { ARIA2_BFD( - params.bfd + bfd ) ch_bfd = ARIA2_BFD.out.db ch_versions = ch_versions.mix(ARIA2_BFD.out.versions) } else { ARIA2_SMALL_BFD( - params.small_bfd + small_bfd ) ch_small_bfd = ARIA2_SMALL_BFD.out.db ch_versions = ch_versions.mix(ARIA2_SMALL_BFD.out.versions) } ARIA2_ALPHAFOLD2_PARAMS( - params.alphafold2_params + alphafold2_params ) ch_params = ARIA2_ALPHAFOLD2_PARAMS.out.db ch_versions = ch_versions.mix(ARIA2_ALPHAFOLD2_PARAMS.out.versions) ARIA2_MGNIFY( - params.mgnify + mgnify ) ch_mgnify = ARIA2_MGNIFY.out.db ch_versions = ch_versions.mix(ARIA2_MGNIFY.out.versions) ARIA2_PDB70( - params.pdb70 + pdb70 ) ch_pdb70 = ARIA2_PDB70.out.db ch_versions = ch_versions.mix(ARIA2_PDB70.out.versions) DOWNLOAD_PDBMMCIF( - params.pdb_mmcif, - params.pdb_obsolete + pdb_mmcif, + pdb_obsolete ) ch_mmcif = DOWNLOAD_PDBMMCIF.out.ch_db ch_versions = ch_versions.mix(DOWNLOAD_PDBMMCIF.out.versions) ARIA2_UNIREF30( - params.uniref30_alphafold2 + uniref30_alphafold2 ) ch_uniref30 = ARIA2_UNIREF30.out.db ch_versions = ch_versions.mix(ARIA2_UNIREF30.out.versions) ARIA2_UNIREF90( - params.uniref90 + uniref90 ) ch_uniref90 = ARIA2_UNIREF90.out.db ch_versions = ch_versions.mix(ARIA2_UNIREF90.out.versions) ARIA2 ( - params.pdb_seqres + pdb_seqres ) ch_pdb_seqres = ARIA2.out.downloaded_file ch_versions = ch_versions.mix(ARIA2.out.versions) ARIA2_UNIPROT_SPROT( - params.uniprot_sprot + uniprot_sprot ) ch_versions = ch_versions.mix(ARIA2_UNIPROT_SPROT.out.versions) ARIA2_UNIPROT_TREMBL( - params.uniprot_trembl + uniprot_trembl ) ch_versions = ch_versions.mix(ARIA2_UNIPROT_TREMBL.out.versions) COMBINE_UNIPROT ( diff --git a/subworkflows/local/prepare_colabfold_dbs.nf b/subworkflows/local/prepare_colabfold_dbs.nf index fd189f3c..97a23dad 100644 --- a/subworkflows/local/prepare_colabfold_dbs.nf +++ b/subworkflows/local/prepare_colabfold_dbs.nf @@ -11,29 +11,41 @@ include { MMSEQS_TSV2EXPROFILEDB as MMSEQS_TSV2EXPROFILEDB_COLABFOLDDB } from '. 
include { MMSEQS_TSV2EXPROFILEDB as MMSEQS_TSV2EXPROFILEDB_UNIPROT30 } from '../../modules/nf-core/mmseqs/tsv2exprofiledb/main' workflow PREPARE_COLABFOLD_DBS { + + take: + colabfold_db // directory: path/to/colabfold/DBs and params + colabfold_server // string: Specifies the server to use for colabfold + colabfold_alphafold2_params_path // directory: /path/to/colabfold/alphafold2/params/ + colabfold_db_path // directory: /path/to/colabfold/db/ + uniref30_colabfold_path // directory: /path/to/uniref30/colabfold/ + colabfold_alphafold2_params // string: Specifies the link to download colabfold alphafold2 params + colabfold_db_link // string: Specifies the link to download colabfold db + uniref30_colabfold_link // string: Specifies the link to download uniref30 + create_colabfold_index // boolean: Create index for colabfold db + main: ch_params = Channel.empty() ch_colabfold_db = Channel.empty() ch_uniref30 = Channel.empty() ch_versions = Channel.empty() - if (params.colabfold_db) { - ch_params = file( params.colabfold_alphafold2_params_path, type: 'any' ) - if (params.colabfold_server == 'local') { - ch_colabfold_db = file( params.colabfold_db_path, type: 'any' ) - ch_uniref30 = file( params.uniref30_colabfold_path , type: 'any' ) + if (colabfold_db) { + ch_params = file( colabfold_alphafold2_params_path, type: 'any' ) + if (colabfold_server == 'local') { + ch_colabfold_db = file( colabfold_db_path, type: 'any' ) + ch_uniref30 = file( uniref30_colabfold_path , type: 'any' ) } } else { ARIA2_COLABFOLD_PARAMS ( - params.colabfold_alphafold2_params + colabfold_alphafold2_params ) ch_params = ARIA2_COLABFOLD_PARAMS.out.db ch_versions = ch_versions.mix(ARIA2_COLABFOLD_PARAMS.out.versions) if (params.colabfold_server == 'local') { ARIA2_COLABFOLD_DB ( - params.colabfold_db_link + colabfold_db_link ) ch_versions = ch_versions.mix(ARIA2_COLABFOLD_DB.out.versions) @@ -52,7 +64,7 @@ workflow PREPARE_COLABFOLD_DBS { } ARIA2_UNIREF30( - params.uniref30 + uniref30_colabfold_link ) ch_versions = ch_versions.mix(ARIA2_UNIREF30.out.versions) @@ -62,7 +74,7 @@ workflow PREPARE_COLABFOLD_DBS { ch_uniref30 = MMSEQS_TSV2EXPROFILEDB_UNIPROT30.out.db_exprofile ch_versions = ch_versions.mix(MMSEQS_TSV2EXPROFILEDB_UNIPROT30.out.versions) - if (params.create_colabfold_index) { + if (create_colabfold_index) { MMSEQS_CREATEINDEX_UNIPROT30 ( MMSEQS_TSV2EXPROFILEDB_UNIPROT30.out.db_exprofile ) diff --git a/subworkflows/local/prepare_esmfold_dbs.nf b/subworkflows/local/prepare_esmfold_dbs.nf index b6f3a1ff..59b43ddf 100644 --- a/subworkflows/local/prepare_esmfold_dbs.nf +++ b/subworkflows/local/prepare_esmfold_dbs.nf @@ -9,7 +9,8 @@ include { ARIA2 as ARIA2_ESM2_T36_3B_UR50D_CONTACT_REGRESSION } from '../../modu workflow PREPARE_ESMFOLD_DBS { take: - esmfold_db_path // directory: /path/to/esmfold/db/ + esmfold_db // directory: /path/to/esmfold/db/ + esmfold_params_path // directory: /path/to/esmfold/params/ esmfold_3B_v1 // string: Specifies the link to download esmfold 3B v1 esm2_t36_3B_UR50D // string: Specifies the link to download esm2 t36 3B UR50D esm2_t36_3B_UR50D_contact_regression // string: Specifies the link to download esm2 t36 3B UR50D contact regression From 896cf721bcabc12516e2c95b8b23f8008c28193d Mon Sep 17 00:00:00 2001 From: JoseEspinosa Date: Thu, 29 Feb 2024 16:25:19 +0100 Subject: [PATCH 11/20] Fix colabfold workflow --- workflows/colabfold.nf | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/workflows/colabfold.nf b/workflows/colabfold.nf index 04aab71f..e2eed81f 100644 --- 
a/workflows/colabfold.nf +++ b/workflows/colabfold.nf @@ -70,8 +70,8 @@ workflow COLABFOLD { MULTIFASTA_TO_CSV.out.input_csv, colabfold_model_preset, ch_colabfold_params, - ch_colabfold_db, - ch_uniref30, + [], + [], num_recycle ) ch_versions = ch_versions.mix(COLABFOLD_BATCH.out.versions) @@ -80,8 +80,8 @@ workflow COLABFOLD { ch_fasta, colabfold_model_preset, ch_colabfold_params, - ch_colabfold_db, - ch_uniref30, + [], + [], num_recycle ) ch_versions = ch_versions.mix(COLABFOLD_BATCH.out.versions) From 2d8bcae02dd7b4d9534282fef0b5b710de26fec3 Mon Sep 17 00:00:00 2001 From: JoseEspinosa Date: Thu, 29 Feb 2024 21:10:40 +0100 Subject: [PATCH 12/20] Remove prints --- subworkflows/local/utils_nfcore_proteinfold_pipeline/main.nf | 2 -- 1 file changed, 2 deletions(-) diff --git a/subworkflows/local/utils_nfcore_proteinfold_pipeline/main.nf b/subworkflows/local/utils_nfcore_proteinfold_pipeline/main.nf index e605955a..5aa15e8d 100644 --- a/subworkflows/local/utils_nfcore_proteinfold_pipeline/main.nf +++ b/subworkflows/local/utils_nfcore_proteinfold_pipeline/main.nf @@ -138,12 +138,10 @@ def getColabfoldAlphafold2Params() { // def getColabfoldAlphafold2ParamsPath() { def path = null - println ("params.colabfold_alphafold2_params_tags...................${params.colabfold_alphafold2_params_tags}") params.colabfold_model_preset.toString() if (params.colabfold_alphafold2_params_tags) { if (params.colabfold_alphafold2_params_tags.containsKey(params.colabfold_model_preset.toString())) { path = "${params.colabfold_db}/params/" + params.colabfold_alphafold2_params_tags[ params.colabfold_model_preset.toString() ] - println ("path...................${path}") } } return path From d779379bd7d4c5fe6ebb42e8233e20f47ec4327f Mon Sep 17 00:00:00 2001 From: JoseEspinosa Date: Thu, 29 Feb 2024 21:11:09 +0100 Subject: [PATCH 13/20] Remove lib folder --- lib/NfcoreTemplate.groovy | 356 ---------------------------------- lib/Utils.groovy | 47 ----- lib/WorkflowAlphafold2.groovy | 100 ---------- lib/WorkflowColabfold.groovy | 68 ------- lib/WorkflowEsmfold.groovy | 68 ------- lib/WorkflowMain.groovy | 99 ---------- 6 files changed, 738 deletions(-) delete mode 100755 lib/NfcoreTemplate.groovy delete mode 100644 lib/Utils.groovy delete mode 100755 lib/WorkflowAlphafold2.groovy delete mode 100755 lib/WorkflowColabfold.groovy delete mode 100644 lib/WorkflowEsmfold.groovy delete mode 100755 lib/WorkflowMain.groovy diff --git a/lib/NfcoreTemplate.groovy b/lib/NfcoreTemplate.groovy deleted file mode 100755 index e248e4c3..00000000 --- a/lib/NfcoreTemplate.groovy +++ /dev/null @@ -1,356 +0,0 @@ -// -// This file holds several functions used within the nf-core pipeline template. -// - -import org.yaml.snakeyaml.Yaml -import groovy.json.JsonOutput -import nextflow.extension.FilesEx - -class NfcoreTemplate { - - // - // Check AWS Batch related parameters have been specified correctly - // - public static void awsBatch(workflow, params) { - if (workflow.profile.contains('awsbatch')) { - // Check params.awsqueue and params.awsregion have been set if running on AWSBatch - assert (params.awsqueue && params.awsregion) : "Specify correct --awsqueue and --awsregion parameters on AWSBatch!" - // Check outdir paths to be S3 buckets if running on AWSBatch - assert params.outdir.startsWith('s3:') : "Outdir not on S3 - specify S3 Bucket to run on AWSBatch!" 
- } - } - - // - // Warn if a -profile or Nextflow config has not been provided to run the pipeline - // - public static void checkConfigProvided(workflow, log) { - if (workflow.profile == 'standard' && workflow.configFiles.size() <= 1) { - log.warn "[$workflow.manifest.name] You are attempting to run the pipeline without any custom configuration!\n\n" + - "This will be dependent on your local compute environment but can be achieved via one or more of the following:\n" + - " (1) Using an existing pipeline profile e.g. `-profile docker` or `-profile singularity`\n" + - " (2) Using an existing nf-core/configs for your Institution e.g. `-profile crick` or `-profile uppmax`\n" + - " (3) Using your own local custom config e.g. `-c /path/to/your/custom.config`\n\n" + - "Please refer to the quick start section and usage docs for the pipeline.\n " - } - } - - // - // Generate version string - // - public static String version(workflow) { - String version_string = "" - - if (workflow.manifest.version) { - def prefix_v = workflow.manifest.version[0] != 'v' ? 'v' : '' - version_string += "${prefix_v}${workflow.manifest.version}" - } - - if (workflow.commitId) { - def git_shortsha = workflow.commitId.substring(0, 7) - version_string += "-g${git_shortsha}" - } - - return version_string - } - - // - // Construct and send completion email - // - public static void email(workflow, params, summary_params, projectDir, log, multiqc_report=[]) { - - // Set up the e-mail variables - def subject = "[$workflow.manifest.name] Successful: $workflow.runName" - if (!workflow.success) { - subject = "[$workflow.manifest.name] FAILED: $workflow.runName" - } - - def summary = [:] - for (group in summary_params.keySet()) { - summary << summary_params[group] - } - - def misc_fields = [:] - misc_fields['Date Started'] = workflow.start - misc_fields['Date Completed'] = workflow.complete - misc_fields['Pipeline script file path'] = workflow.scriptFile - misc_fields['Pipeline script hash ID'] = workflow.scriptId - if (workflow.repository) misc_fields['Pipeline repository Git URL'] = workflow.repository - if (workflow.commitId) misc_fields['Pipeline repository Git Commit'] = workflow.commitId - if (workflow.revision) misc_fields['Pipeline Git branch/tag'] = workflow.revision - misc_fields['Nextflow Version'] = workflow.nextflow.version - misc_fields['Nextflow Build'] = workflow.nextflow.build - misc_fields['Nextflow Compile Timestamp'] = workflow.nextflow.timestamp - - def email_fields = [:] - email_fields['version'] = NfcoreTemplate.version(workflow) - email_fields['runName'] = workflow.runName - email_fields['success'] = workflow.success - email_fields['dateComplete'] = workflow.complete - email_fields['duration'] = workflow.duration - email_fields['exitStatus'] = workflow.exitStatus - email_fields['errorMessage'] = (workflow.errorMessage ?: 'None') - email_fields['errorReport'] = (workflow.errorReport ?: 'None') - email_fields['commandLine'] = workflow.commandLine - email_fields['projectDir'] = workflow.projectDir - email_fields['summary'] = summary << misc_fields - - // On success try attach the multiqc report - def mqc_report = null - try { - if (workflow.success) { - mqc_report = multiqc_report.getVal() - if (mqc_report.getClass() == ArrayList && mqc_report.size() >= 1) { - if (mqc_report.size() > 1) { - log.warn "[$workflow.manifest.name] Found multiple reports from process 'MULTIQC', will use only one" - } - mqc_report = mqc_report[0] - } - } - } catch (all) { - if (multiqc_report) { - log.warn 
"[$workflow.manifest.name] Could not attach MultiQC report to summary email" - } - } - - // Check if we are only sending emails on failure - def email_address = params.email - if (!params.email && params.email_on_fail && !workflow.success) { - email_address = params.email_on_fail - } - - // Render the TXT template - def engine = new groovy.text.GStringTemplateEngine() - def tf = new File("$projectDir/assets/email_template.txt") - def txt_template = engine.createTemplate(tf).make(email_fields) - def email_txt = txt_template.toString() - - // Render the HTML template - def hf = new File("$projectDir/assets/email_template.html") - def html_template = engine.createTemplate(hf).make(email_fields) - def email_html = html_template.toString() - - // Render the sendmail template - def max_multiqc_email_size = (params.containsKey('max_multiqc_email_size') ? params.max_multiqc_email_size : 0) as nextflow.util.MemoryUnit - def smail_fields = [ email: email_address, subject: subject, email_txt: email_txt, email_html: email_html, projectDir: "$projectDir", mqcFile: mqc_report, mqcMaxSize: max_multiqc_email_size.toBytes() ] - def sf = new File("$projectDir/assets/sendmail_template.txt") - def sendmail_template = engine.createTemplate(sf).make(smail_fields) - def sendmail_html = sendmail_template.toString() - - // Send the HTML e-mail - Map colors = logColours(params.monochrome_logs) - if (email_address) { - try { - if (params.plaintext_email) { throw GroovyException('Send plaintext e-mail, not HTML') } - // Try to send HTML e-mail using sendmail - def sendmail_tf = new File(workflow.launchDir.toString(), ".sendmail_tmp.html") - sendmail_tf.withWriter { w -> w << sendmail_html } - [ 'sendmail', '-t' ].execute() << sendmail_html - log.info "-${colors.purple}[$workflow.manifest.name]${colors.green} Sent summary e-mail to $email_address (sendmail)-" - } catch (all) { - // Catch failures and try with plaintext - def mail_cmd = [ 'mail', '-s', subject, '--content-type=text/html', email_address ] - if ( mqc_report != null && mqc_report.size() <= max_multiqc_email_size.toBytes() ) { - mail_cmd += [ '-A', mqc_report ] - } - mail_cmd.execute() << email_html - log.info "-${colors.purple}[$workflow.manifest.name]${colors.green} Sent summary e-mail to $email_address (mail)-" - } - } - - // Write summary e-mail HTML to a file - def output_hf = new File(workflow.launchDir.toString(), ".pipeline_report.html") - output_hf.withWriter { w -> w << email_html } - FilesEx.copyTo(output_hf.toPath(), "${params.outdir}/pipeline_info/pipeline_report.html"); - output_hf.delete() - - // Write summary e-mail TXT to a file - def output_tf = new File(workflow.launchDir.toString(), ".pipeline_report.txt") - output_tf.withWriter { w -> w << email_txt } - FilesEx.copyTo(output_tf.toPath(), "${params.outdir}/pipeline_info/pipeline_report.txt"); - output_tf.delete() - } - - // - // Construct and send a notification to a web server as JSON - // e.g. 
Microsoft Teams and Slack - // - public static void IM_notification(workflow, params, summary_params, projectDir, log) { - def hook_url = params.hook_url - - def summary = [:] - for (group in summary_params.keySet()) { - summary << summary_params[group] - } - - def misc_fields = [:] - misc_fields['start'] = workflow.start - misc_fields['complete'] = workflow.complete - misc_fields['scriptfile'] = workflow.scriptFile - misc_fields['scriptid'] = workflow.scriptId - if (workflow.repository) misc_fields['repository'] = workflow.repository - if (workflow.commitId) misc_fields['commitid'] = workflow.commitId - if (workflow.revision) misc_fields['revision'] = workflow.revision - misc_fields['nxf_version'] = workflow.nextflow.version - misc_fields['nxf_build'] = workflow.nextflow.build - misc_fields['nxf_timestamp'] = workflow.nextflow.timestamp - - def msg_fields = [:] - msg_fields['version'] = NfcoreTemplate.version(workflow) - msg_fields['runName'] = workflow.runName - msg_fields['success'] = workflow.success - msg_fields['dateComplete'] = workflow.complete - msg_fields['duration'] = workflow.duration - msg_fields['exitStatus'] = workflow.exitStatus - msg_fields['errorMessage'] = (workflow.errorMessage ?: 'None') - msg_fields['errorReport'] = (workflow.errorReport ?: 'None') - msg_fields['commandLine'] = workflow.commandLine.replaceFirst(/ +--hook_url +[^ ]+/, "") - msg_fields['projectDir'] = workflow.projectDir - msg_fields['summary'] = summary << misc_fields - - // Render the JSON template - def engine = new groovy.text.GStringTemplateEngine() - // Different JSON depending on the service provider - // Defaults to "Adaptive Cards" (https://adaptivecards.io), except Slack which has its own format - def json_path = hook_url.contains("hooks.slack.com") ? "slackreport.json" : "adaptivecard.json" - def hf = new File("$projectDir/assets/${json_path}") - def json_template = engine.createTemplate(hf).make(msg_fields) - def json_message = json_template.toString() - - // POST - def post = new URL(hook_url).openConnection(); - post.setRequestMethod("POST") - post.setDoOutput(true) - post.setRequestProperty("Content-Type", "application/json") - post.getOutputStream().write(json_message.getBytes("UTF-8")); - def postRC = post.getResponseCode(); - if (! 
postRC.equals(200)) { - log.warn(post.getErrorStream().getText()); - } - } - - // - // Dump pipeline parameters in a json file - // - public static void dump_parameters(workflow, params) { - def timestamp = new java.util.Date().format( 'yyyy-MM-dd_HH-mm-ss') - def filename = "params_${timestamp}.json" - def temp_pf = new File(workflow.launchDir.toString(), ".${filename}") - def jsonStr = JsonOutput.toJson(params) - temp_pf.text = JsonOutput.prettyPrint(jsonStr) - - FilesEx.copyTo(temp_pf.toPath(), "${params.outdir}/pipeline_info/params_${timestamp}.json") - temp_pf.delete() - } - - // - // Print pipeline summary on completion - // - public static void summary(workflow, params, log) { - Map colors = logColours(params.monochrome_logs) - if (workflow.success) { - if (workflow.stats.ignoredCount == 0) { - log.info "-${colors.purple}[$workflow.manifest.name]${colors.green} Pipeline completed successfully${colors.reset}-" - } else { - log.info "-${colors.purple}[$workflow.manifest.name]${colors.yellow} Pipeline completed successfully, but with errored process(es) ${colors.reset}-" - } - } else { - log.info "-${colors.purple}[$workflow.manifest.name]${colors.red} Pipeline completed with errors${colors.reset}-" - } - } - - // - // ANSII Colours used for terminal logging - // - public static Map logColours(Boolean monochrome_logs) { - Map colorcodes = [:] - - // Reset / Meta - colorcodes['reset'] = monochrome_logs ? '' : "\033[0m" - colorcodes['bold'] = monochrome_logs ? '' : "\033[1m" - colorcodes['dim'] = monochrome_logs ? '' : "\033[2m" - colorcodes['underlined'] = monochrome_logs ? '' : "\033[4m" - colorcodes['blink'] = monochrome_logs ? '' : "\033[5m" - colorcodes['reverse'] = monochrome_logs ? '' : "\033[7m" - colorcodes['hidden'] = monochrome_logs ? '' : "\033[8m" - - // Regular Colors - colorcodes['black'] = monochrome_logs ? '' : "\033[0;30m" - colorcodes['red'] = monochrome_logs ? '' : "\033[0;31m" - colorcodes['green'] = monochrome_logs ? '' : "\033[0;32m" - colorcodes['yellow'] = monochrome_logs ? '' : "\033[0;33m" - colorcodes['blue'] = monochrome_logs ? '' : "\033[0;34m" - colorcodes['purple'] = monochrome_logs ? '' : "\033[0;35m" - colorcodes['cyan'] = monochrome_logs ? '' : "\033[0;36m" - colorcodes['white'] = monochrome_logs ? '' : "\033[0;37m" - - // Bold - colorcodes['bblack'] = monochrome_logs ? '' : "\033[1;30m" - colorcodes['bred'] = monochrome_logs ? '' : "\033[1;31m" - colorcodes['bgreen'] = monochrome_logs ? '' : "\033[1;32m" - colorcodes['byellow'] = monochrome_logs ? '' : "\033[1;33m" - colorcodes['bblue'] = monochrome_logs ? '' : "\033[1;34m" - colorcodes['bpurple'] = monochrome_logs ? '' : "\033[1;35m" - colorcodes['bcyan'] = monochrome_logs ? '' : "\033[1;36m" - colorcodes['bwhite'] = monochrome_logs ? '' : "\033[1;37m" - - // Underline - colorcodes['ublack'] = monochrome_logs ? '' : "\033[4;30m" - colorcodes['ured'] = monochrome_logs ? '' : "\033[4;31m" - colorcodes['ugreen'] = monochrome_logs ? '' : "\033[4;32m" - colorcodes['uyellow'] = monochrome_logs ? '' : "\033[4;33m" - colorcodes['ublue'] = monochrome_logs ? '' : "\033[4;34m" - colorcodes['upurple'] = monochrome_logs ? '' : "\033[4;35m" - colorcodes['ucyan'] = monochrome_logs ? '' : "\033[4;36m" - colorcodes['uwhite'] = monochrome_logs ? '' : "\033[4;37m" - - // High Intensity - colorcodes['iblack'] = monochrome_logs ? '' : "\033[0;90m" - colorcodes['ired'] = monochrome_logs ? '' : "\033[0;91m" - colorcodes['igreen'] = monochrome_logs ? '' : "\033[0;92m" - colorcodes['iyellow'] = monochrome_logs ? 
'' : "\033[0;93m" - colorcodes['iblue'] = monochrome_logs ? '' : "\033[0;94m" - colorcodes['ipurple'] = monochrome_logs ? '' : "\033[0;95m" - colorcodes['icyan'] = monochrome_logs ? '' : "\033[0;96m" - colorcodes['iwhite'] = monochrome_logs ? '' : "\033[0;97m" - - // Bold High Intensity - colorcodes['biblack'] = monochrome_logs ? '' : "\033[1;90m" - colorcodes['bired'] = monochrome_logs ? '' : "\033[1;91m" - colorcodes['bigreen'] = monochrome_logs ? '' : "\033[1;92m" - colorcodes['biyellow'] = monochrome_logs ? '' : "\033[1;93m" - colorcodes['biblue'] = monochrome_logs ? '' : "\033[1;94m" - colorcodes['bipurple'] = monochrome_logs ? '' : "\033[1;95m" - colorcodes['bicyan'] = monochrome_logs ? '' : "\033[1;96m" - colorcodes['biwhite'] = monochrome_logs ? '' : "\033[1;97m" - - return colorcodes - } - - // - // Does what is says on the tin - // - public static String dashedLine(monochrome_logs) { - Map colors = logColours(monochrome_logs) - return "-${colors.dim}----------------------------------------------------${colors.reset}-" - } - - // - // nf-core logo - // - public static String logo(workflow, monochrome_logs) { - Map colors = logColours(monochrome_logs) - String workflow_version = NfcoreTemplate.version(workflow) - String.format( - """\n - ${dashedLine(monochrome_logs)} - ${colors.green},--.${colors.black}/${colors.green},-.${colors.reset} - ${colors.blue} ___ __ __ __ ___ ${colors.green}/,-._.--~\'${colors.reset} - ${colors.blue} |\\ | |__ __ / ` / \\ |__) |__ ${colors.yellow}} {${colors.reset} - ${colors.blue} | \\| | \\__, \\__/ | \\ |___ ${colors.green}\\`-._,-`-,${colors.reset} - ${colors.green}`._,._,\'${colors.reset} - ${colors.purple} ${workflow.manifest.name} ${workflow_version}${colors.reset} - ${dashedLine(monochrome_logs)} - """.stripIndent() - ) - } -} diff --git a/lib/Utils.groovy b/lib/Utils.groovy deleted file mode 100644 index 8d030f4e..00000000 --- a/lib/Utils.groovy +++ /dev/null @@ -1,47 +0,0 @@ -// -// This file holds several Groovy functions that could be useful for any Nextflow pipeline -// - -import org.yaml.snakeyaml.Yaml - -class Utils { - - // - // When running with -profile conda, warn if channels have not been set-up appropriately - // - public static void checkCondaChannels(log) { - Yaml parser = new Yaml() - def channels = [] - try { - def config = parser.load("conda config --show channels".execute().text) - channels = config.channels - } catch(NullPointerException | IOException e) { - log.warn "Could not verify conda channel configuration." - return - } - - // Check that all channels are present - // This channel list is ordered by required channel priority. 
- def required_channels_in_order = ['conda-forge', 'bioconda', 'defaults'] - def channels_missing = ((required_channels_in_order as Set) - (channels as Set)) as Boolean - - // Check that they are in the right order - def channel_priority_violation = false - def n = required_channels_in_order.size() - for (int i = 0; i < n - 1; i++) { - channel_priority_violation |= !(channels.indexOf(required_channels_in_order[i]) < channels.indexOf(required_channels_in_order[i+1])) - } - - if (channels_missing | channel_priority_violation) { - log.warn "~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~\n" + - " There is a problem with your Conda configuration!\n\n" + - " You will need to set-up the conda-forge and bioconda channels correctly.\n" + - " Please refer to https://bioconda.github.io/\n" + - " The observed channel order is \n" + - " ${channels}\n" + - " but the following channel order is required:\n" + - " ${required_channels_in_order}\n" + - "~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~" - } - } -} diff --git a/lib/WorkflowAlphafold2.groovy b/lib/WorkflowAlphafold2.groovy deleted file mode 100755 index bcf5f396..00000000 --- a/lib/WorkflowAlphafold2.groovy +++ /dev/null @@ -1,100 +0,0 @@ -// -// This file holds several functions specific to the workflow/proteinfold.nf in the nf-core/proteinfold pipeline -// - -import nextflow.Nextflow -import groovy.text.SimpleTemplateEngine - -class WorkflowAlphafold2 { - - // - // Check and validate parameters - // - public static void initialise(params, log) { } - - // - // Get workflow summary for MultiQC - // - public static String paramsSummaryMultiqc(workflow, summary) { - String summary_section = '' - for (group in summary.keySet()) { - def group_params = summary.get(group) // This gets the parameters of that particular group - if (group_params) { - summary_section += "

<p style=\"font-size:110%\"><b>$group</b></p>\n"
- summary_section += "    <dl class=\"dl-horizontal\">\n"
- for (param in group_params.keySet()) {
- summary_section += "        <dt>$param</dt><dd><samp>${group_params.get(param) ?: '<span style=\"color:#999999;\">N/A</a>'}</samp></dd>\n"
- }
- summary_section += "    </dl>
    \n" - } - } - - String yaml_file_text = "id: '${workflow.manifest.name.replace('/','-')}-summary'\n" - yaml_file_text += "description: ' - this information is collected when the pipeline is started.'\n" - yaml_file_text += "section_name: '${workflow.manifest.name} Workflow Summary'\n" - yaml_file_text += "section_href: 'https://github.com/${workflow.manifest.name}'\n" - yaml_file_text += "plot_type: 'html'\n" - yaml_file_text += "data: |\n" - yaml_file_text += "${summary_section}" - return yaml_file_text - } - - // - // Generate methods description for MultiQC - // - - public static String toolCitationText(params) { - - // TODO nf-core: Optionally add in-text citation tools to this list. - // Can use ternary operators to dynamically construct based conditions, e.g. params["run_xyz"] ? "Tool (Foo et al. 2023)" : "", - // Uncomment function in methodsDescriptionText to render in MultiQC report - def citation_text = [ - "Tools used in the workflow included:", - "FastQC (Andrews 2010),", - "MultiQC (Ewels et al. 2016)", - "." - ].join(' ').trim() - - return citation_text - } - - public static String toolBibliographyText(params) { - - // TODO Optionally add bibliographic entries to this list. - // Can use ternary operators to dynamically construct based conditions, e.g. params["run_xyz"] ? "
<li>Author (2023) Pub name, Journal, DOI</li>" : "",
- // Uncomment function in methodsDescriptionText to render in MultiQC report
- def reference_text = [
- "<li>Andrews S, (2010) FastQC, URL: https://www.bioinformatics.babraham.ac.uk/projects/fastqc/).</li>",
- "<li>Ewels, P., Magnusson, M., Lundin, S., & Käller, M. (2016). MultiQC: summarize analysis results for multiple tools and samples in a single report. Bioinformatics , 32(19), 3047–3048. doi: /10.1093/bioinformatics/btw354</li>"
- ].join(' ').trim()
-
- return reference_text
- }
-
- public static String methodsDescriptionText(run_workflow, mqc_methods_yaml, params) {
- // Convert to a named map so can be used as with familar NXF ${workflow} variable syntax in the MultiQC YML file
- def meta = [:]
- meta.workflow = run_workflow.toMap()
- meta["manifest_map"] = run_workflow.manifest.toMap()
-
- // Pipeline DOI
- meta["doi_text"] = meta.manifest_map.doi ? "(doi: ${meta.manifest_map.doi})" : ""
- meta["nodoi_text"] = meta.manifest_map.doi ? "": "
  • If available, make sure to update the text to include the Zenodo DOI of version of the pipeline used.
  • " - - // Tool references - meta["tool_citations"] = "" - meta["tool_bibliography"] = "" - - // TODO Only uncomment below if logic in toolCitationText/toolBibliographyText has been filled! - //meta["tool_citations"] = toolCitationText(params).replaceAll(", \\.", ".").replaceAll("\\. \\.", ".").replaceAll(", \\.", ".") - //meta["tool_bibliography"] = toolBibliographyText(params) - - - def methods_text = mqc_methods_yaml.text - - def engine = new SimpleTemplateEngine() - def description_html = engine.createTemplate(methods_text).make(meta) - - return description_html - } -} diff --git a/lib/WorkflowColabfold.groovy b/lib/WorkflowColabfold.groovy deleted file mode 100755 index e84f9c19..00000000 --- a/lib/WorkflowColabfold.groovy +++ /dev/null @@ -1,68 +0,0 @@ -// -// This file holds several functions specific to the workflow/proteinfold.nf in the nf-core/proteinfold pipeline -// - -import nextflow.Nextflow -import groovy.text.SimpleTemplateEngine - -class WorkflowColabfold { - - // - // Check and validate parameters - // - public static void initialise(params, log) { } - - // - // Get workflow summary for MultiQC - // - public static String paramsSummaryMultiqc(workflow, summary) { - String summary_section = '' - for (group in summary.keySet()) { - def group_params = summary.get(group) // This gets the parameters of that particular group - if (group_params) { - summary_section += "

<p style=\"font-size:110%\"><b>$group</b></p>\n"
- summary_section += "    <dl class=\"dl-horizontal\">\n"
- for (param in group_params.keySet()) {
- summary_section += "        <dt>$param</dt><dd><samp>${group_params.get(param) ?: '<span style=\"color:#999999;\">N/A</a>'}</samp></dd>\n"
- }
- summary_section += "    </dl>
    \n" - } - } - - String yaml_file_text = "id: '${workflow.manifest.name.replace('/','-')}-summary'\n" - yaml_file_text += "description: ' - this information is collected when the pipeline is started.'\n" - yaml_file_text += "section_name: '${workflow.manifest.name} Workflow Summary'\n" - yaml_file_text += "section_href: 'https://github.com/${workflow.manifest.name}'\n" - yaml_file_text += "plot_type: 'html'\n" - yaml_file_text += "data: |\n" - yaml_file_text += "${summary_section}" - return yaml_file_text - } - - public static String methodsDescriptionText(run_workflow, mqc_methods_yaml, params) { - // Convert to a named map so can be used as with familar NXF ${workflow} variable syntax in the MultiQC YML file - def meta = [:] - meta.workflow = run_workflow.toMap() - meta["manifest_map"] = run_workflow.manifest.toMap() - - // Pipeline DOI - meta["doi_text"] = meta.manifest_map.doi ? "(doi: ${meta.manifest_map.doi})" : "" - meta["nodoi_text"] = meta.manifest_map.doi ? "": "
  • If available, make sure to update the text to include the Zenodo DOI of version of the pipeline used.
  • " - - // Tool references - meta["tool_citations"] = "" - meta["tool_bibliography"] = "" - - // TODO Only uncomment below if logic in toolCitationText/toolBibliographyText has been filled! - //meta["tool_citations"] = toolCitationText(params).replaceAll(", \\.", ".").replaceAll("\\. \\.", ".").replaceAll(", \\.", ".") - //meta["tool_bibliography"] = toolBibliographyText(params) - - - def methods_text = mqc_methods_yaml.text - - def engine = new SimpleTemplateEngine() - def description_html = engine.createTemplate(methods_text).make(meta) - - return description_html - } -} diff --git a/lib/WorkflowEsmfold.groovy b/lib/WorkflowEsmfold.groovy deleted file mode 100644 index 6c38a2af..00000000 --- a/lib/WorkflowEsmfold.groovy +++ /dev/null @@ -1,68 +0,0 @@ -// -// This file holds several functions specific to the workflow/proteinfold.nf in the nf-core/proteinfold pipeline -// - -import nextflow.Nextflow -import groovy.text.SimpleTemplateEngine - -class WorkflowEsmfold { - - // - // Check and validate parameters - // - public static void initialise(params, log) { } - - // - // Get workflow summary for MultiQC - // - public static String paramsSummaryMultiqc(workflow, summary) { - String summary_section = '' - for (group in summary.keySet()) { - def group_params = summary.get(group) // This gets the parameters of that particular group - if (group_params) { - summary_section += "

<p style=\"font-size:110%\"><b>$group</b></p>\n"
- summary_section += "    <dl class=\"dl-horizontal\">\n"
- for (param in group_params.keySet()) {
- summary_section += "        <dt>$param</dt><dd><samp>${group_params.get(param) ?: '<span style=\"color:#999999;\">N/A</a>'}</samp></dd>\n"
- }
- summary_section += "    </dl>
    \n" - } - } - - String yaml_file_text = "id: '${workflow.manifest.name.replace('/','-')}-summary'\n" - yaml_file_text += "description: ' - this information is collected when the pipeline is started.'\n" - yaml_file_text += "section_name: '${workflow.manifest.name} Workflow Summary'\n" - yaml_file_text += "section_href: 'https://github.com/${workflow.manifest.name}'\n" - yaml_file_text += "plot_type: 'html'\n" - yaml_file_text += "data: |\n" - yaml_file_text += "${summary_section}" - return yaml_file_text - } - - public static String methodsDescriptionText(run_workflow, mqc_methods_yaml, params) { - // Convert to a named map so can be used as with familar NXF ${workflow} variable syntax in the MultiQC YML file - def meta = [:] - meta.workflow = run_workflow.toMap() - meta["manifest_map"] = run_workflow.manifest.toMap() - - // Pipeline DOI - meta["doi_text"] = meta.manifest_map.doi ? "(doi: ${meta.manifest_map.doi})" : "" - meta["nodoi_text"] = meta.manifest_map.doi ? "": "
  • If available, make sure to update the text to include the Zenodo DOI of version of the pipeline used.
  • " - - // Tool references - meta["tool_citations"] = "" - meta["tool_bibliography"] = "" - - // TODO Only uncomment below if logic in toolCitationText/toolBibliographyText has been filled! - //meta["tool_citations"] = toolCitationText(params).replaceAll(", \\.", ".").replaceAll("\\. \\.", ".").replaceAll(", \\.", ".") - //meta["tool_bibliography"] = toolBibliographyText(params) - - - def methods_text = mqc_methods_yaml.text - - def engine = new SimpleTemplateEngine() - def description_html = engine.createTemplate(methods_text).make(meta) - - return description_html - } -} diff --git a/lib/WorkflowMain.groovy b/lib/WorkflowMain.groovy deleted file mode 100755 index 0ad73200..00000000 --- a/lib/WorkflowMain.groovy +++ /dev/null @@ -1,99 +0,0 @@ -// -// This file holds several functions specific to the main.nf workflow in the nf-core/proteinfold pipeline -// - -import nextflow.Nextflow - -class WorkflowMain { - - // - // Citation string for pipeline - // - public static String citation(workflow) { - return "If you use ${workflow.manifest.name} for your analysis please cite:\n\n" + - // TODO nf-core: Add Zenodo DOI for pipeline after first release - //"* The pipeline\n" + - //" https://doi.org/10.5281/zenodo.XXXXXXX\n\n" + - "* The nf-core framework\n" + - " https://doi.org/10.1038/s41587-020-0439-x\n\n" + - "* Software dependencies\n" + - " https://github.com/${workflow.manifest.name}/blob/master/CITATIONS.md" - } - - - // - // Validate parameters and print summary to screen - // - public static void initialise(workflow, params, log, args) { - - // Print workflow version and exit on --version - if (params.version) { - String workflow_version = NfcoreTemplate.version(workflow) - log.info "${workflow.manifest.name} ${workflow_version}" - System.exit(0) - } - - // Check that a -profile or Nextflow config has been provided to run the pipeline - NfcoreTemplate.checkConfigProvided(workflow, log) - // Check that the profile doesn't contain spaces and doesn't end with a trailing comma - checkProfile(workflow.profile, args, log) - - // Check that conda channels are set-up correctly - if (workflow.profile.tokenize(',').intersect(['conda', 'mamba']).size() >= 1) { - Utils.checkCondaChannels(log) - } - - - // Check AWS batch settings - NfcoreTemplate.awsBatch(workflow, params) - } - // - // Get attribute from genome config file e.g. 
fasta - // - public static Object getGenomeAttribute(params, attribute) { - if (params.genomes && params.genome && params.genomes.containsKey(params.genome)) { - if (params.genomes[ params.genome ].containsKey(attribute)) { - return params.genomes[ params.genome ][ attribute ] - } - } - return null - } - - // - // Get link to Colabfold Alphafold2 parameters - // - public static String getColabfoldAlphafold2Params(params) { - def link = null - if (params.colabfold_alphafold2_params_tags) { - if (params.colabfold_alphafold2_params_tags.containsKey(params.colabfold_model_preset.toString())) { - link = "https://storage.googleapis.com/alphafold/" + params.colabfold_alphafold2_params_tags[ params.colabfold_model_preset.toString() ] + '.tar' - } - } - return link - } - - // - // Get path to Colabfold Alphafold2 parameters - // - public static String getColabfoldAlphafold2ParamsPath(params) { - def path = null - if (params.colabfold_alphafold2_params_tags) { - if (params.colabfold_alphafold2_params_tags.containsKey(params.colabfold_model_preset.toString())) { - path = "${params.colabfold_db}/params/" + params.colabfold_alphafold2_params_tags[ params.colabfold_model_preset.toString() ] - } - } - return path - } - - // - // Exit pipeline if --profile contains spaces - // - private static void checkProfile(profile, args, log) { - if (profile.endsWith(',')) { - Nextflow.error "Profile cannot end with a trailing comma. Please remove the comma from the end of the profile string.\nHint: A common mistake is to provide multiple values to `-profile` separated by spaces. Please use commas to separate profiles instead,e.g., `-profile docker,test`." - } - if (args[0]) { - log.warn "nf-core pipelines do not accept positional arguments. The positional argument `${args[0]}` has been detected.\n Hint: A common mistake is to provide multiple values to `-profile` separated by spaces. Please use commas to separate profiles instead,e.g., `-profile docker,test`." - } - } -} From 8ca4342d0cbd87f8334d984369388a26ae10ea2c Mon Sep 17 00:00:00 2001 From: JoseEspinosa Date: Thu, 29 Feb 2024 21:16:45 +0100 Subject: [PATCH 14/20] Update changelog --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 175fbef6..4cbd1838 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -25,6 +25,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 - [[#113](https://github.com/nf-core/proteinfold/pull/113)] - Include esmfold dbs for full data sets. - [[PR #114](https://github.com/nf-core/rnaseq/pull/114)] - Update paths to test dbs. - [[PR #117](https://github.com/nf-core/proteinfold/pull/117)] - Update pipeline template to [nf-core/tools 2.10](https://github.com/nf-core/tools/releases/tag/2.10). +- [PR #130](https://github.com/nf-core/proteinfold/pull/130) - Remove `lib/` directory. 
## 1.0.0 - White Silver Reebok From 544fccf58fee130493d321eb0c131358232acf35 Mon Sep 17 00:00:00 2001 From: JoseEspinosa Date: Thu, 29 Feb 2024 21:39:12 +0100 Subject: [PATCH 15/20] Make lint happy --- .devcontainer/devcontainer.json | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json index 4a9bc5c7..4ecfbfe3 100644 --- a/.devcontainer/devcontainer.json +++ b/.devcontainer/devcontainer.json @@ -18,11 +18,11 @@ "python.linting.flake8Path": "/opt/conda/bin/flake8", "python.linting.pycodestylePath": "/opt/conda/bin/pycodestyle", "python.linting.pydocstylePath": "/opt/conda/bin/pydocstyle", - "python.linting.pylintPath": "/opt/conda/bin/pylint", + "python.linting.pylintPath": "/opt/conda/bin/pylint" }, // Add the IDs of extensions you want installed when the container is created. - "extensions": ["ms-python.python", "ms-python.vscode-pylance", "nf-core.nf-core-extensionpack"], - }, - }, + "extensions": ["ms-python.python", "ms-python.vscode-pylance", "nf-core.nf-core-extensionpack"] + } + } } From b60cf116e4c4e734ead915f11ef125fdf3cc57a6 Mon Sep 17 00:00:00 2001 From: JoseEspinosa Date: Thu, 29 Feb 2024 21:51:48 +0100 Subject: [PATCH 16/20] Fix lint --- main.nf | 2 +- subworkflows/local/prepare_esmfold_dbs.nf | 4 ++-- .../utils_nfcore_proteinfold_pipeline/main.nf | 2 +- workflows/alphafold2.nf | 14 +++++++------- workflows/colabfold.nf | 4 ++-- workflows/esmfold.nf | 2 +- 6 files changed, 14 insertions(+), 14 deletions(-) diff --git a/main.nf b/main.nf index 4449093f..f7000ee6 100644 --- a/main.nf +++ b/main.nf @@ -124,7 +124,7 @@ workflow NFCORE_PROTEINFOLD { params.create_colabfold_index ) ch_versions = ch_versions.mix(PREPARE_COLABFOLD_DBS.out.versions) - + // // WORKFLOW: Run nf-core/colabfold workflow // diff --git a/subworkflows/local/prepare_esmfold_dbs.nf b/subworkflows/local/prepare_esmfold_dbs.nf index 59b43ddf..39c39370 100644 --- a/subworkflows/local/prepare_esmfold_dbs.nf +++ b/subworkflows/local/prepare_esmfold_dbs.nf @@ -7,8 +7,8 @@ include { ARIA2 as ARIA2_ESM2_T36_3B_UR50D } from '../../modu include { ARIA2 as ARIA2_ESM2_T36_3B_UR50D_CONTACT_REGRESSION } from '../../modules/nf-core/aria2/main' workflow PREPARE_ESMFOLD_DBS { - - take: + + take: esmfold_db // directory: /path/to/esmfold/db/ esmfold_params_path // directory: /path/to/esmfold/params/ esmfold_3B_v1 // string: Specifies the link to download esmfold 3B v1 diff --git a/subworkflows/local/utils_nfcore_proteinfold_pipeline/main.nf b/subworkflows/local/utils_nfcore_proteinfold_pipeline/main.nf index 5aa15e8d..4fca9a96 100644 --- a/subworkflows/local/utils_nfcore_proteinfold_pipeline/main.nf +++ b/subworkflows/local/utils_nfcore_proteinfold_pipeline/main.nf @@ -199,4 +199,4 @@ def methodsDescriptionText(mqc_methods_yaml) { def description_html = engine.createTemplate(methods_text).make(meta) return description_html.toString() -} \ No newline at end of file +} diff --git a/workflows/alphafold2.nf b/workflows/alphafold2.nf index f2c95211..c03fd469 100644 --- a/workflows/alphafold2.nf +++ b/workflows/alphafold2.nf @@ -39,11 +39,11 @@ include { methodsDescriptionText } from '../subworkflows/local/utils_nfcore_prot workflow ALPHAFOLD2 { - take: + take: ch_versions // channel: [ path(versions.yml) ] full_dbs // boolean: Use full databases (otherwise reduced version) alphafold2_mode // string: Mode to run Alphafold2 in - alphafold2_model_preset // string: Specifies the model preset to use for Alphafold2 + alphafold2_model_preset // string: 
Specifies the model preset to use for Alphafold2 ch_alphafold2_params // channel: path(alphafold2_params) ch_bfd // channel: path(bfd) ch_small_bfd // channel: path(small_bfd) @@ -57,7 +57,7 @@ workflow ALPHAFOLD2 { main: ch_multiqc_files = Channel.empty() - + // // Create input channel from input file provided through params.input // @@ -96,7 +96,7 @@ workflow ALPHAFOLD2 { ) ch_multiqc_rep = RUN_ALPHAFOLD2.out.multiqc.collect() ch_versions = ch_versions.mix(RUN_ALPHAFOLD2.out.versions) - + } else if (alphafold2_mode == 'split_msa_prediction') { // // SUBWORKFLOW: Run Alphafold2 split mode, MSA and predicition @@ -118,7 +118,7 @@ workflow ALPHAFOLD2 { ) ch_multiqc_rep = RUN_ALPHAFOLD2_MSA.out.multiqc.collect() ch_versions = ch_versions.mix(RUN_ALPHAFOLD2_MSA.out.versions) - + RUN_ALPHAFOLD2_PRED ( ch_fasta, full_dbs, @@ -136,9 +136,9 @@ workflow ALPHAFOLD2 { RUN_ALPHAFOLD2_MSA.out.features ) ch_multiqc_rep = RUN_ALPHAFOLD2_PRED.out.multiqc.collect() - ch_versions = ch_versions.mix(RUN_ALPHAFOLD2_PRED.out.versions) + ch_versions = ch_versions.mix(RUN_ALPHAFOLD2_PRED.out.versions) } - + // // Collate and save software versions // diff --git a/workflows/colabfold.nf b/workflows/colabfold.nf index e2eed81f..dd38fd0f 100644 --- a/workflows/colabfold.nf +++ b/workflows/colabfold.nf @@ -38,7 +38,7 @@ include { methodsDescriptionText } from '../subworkflows/local/utils_nfcore_prot */ workflow COLABFOLD { - + take: ch_versions // channel: [ path(versions.yml) ] colabfold_model_preset // string: Specifies the model preset to use for colabfold @@ -159,7 +159,7 @@ workflow COLABFOLD { ch_multiqc_logo.toList() ) ch_multiqc_report = MULTIQC.out.report.toList() - + emit: multiqc_report = ch_multiqc_report // channel: /path/to/multiqc_report.html versions = ch_versions // channel: [ path(versions.yml) ] diff --git a/workflows/esmfold.nf b/workflows/esmfold.nf index b372480c..4bf7e2a4 100644 --- a/workflows/esmfold.nf +++ b/workflows/esmfold.nf @@ -42,7 +42,7 @@ workflow ESMFOLD { ch_versions // channel: [ path(versions.yml) ] ch_esmfold_params // directory: /path/to/esmfold/params/ ch_num_recycle // int: Number of recycles for esmfold - + main: ch_multiqc_files = Channel.empty() From b2f4a1ef01c8ee843e5d745fc51285464d77b3fe Mon Sep 17 00:00:00 2001 From: JoseEspinosa Date: Thu, 29 Feb 2024 22:52:34 +0100 Subject: [PATCH 17/20] Fix lint --- .nf-core.yml | 3 +++ .../utils_nextflow_pipeline/tests/main.function.nf.test | 2 +- .../nf-core/utils_nextflow_pipeline/tests/nextflow.config | 2 +- .../nf-core/utils_nfvalidation_plugin/tests/main.nf.test | 2 +- 4 files changed, 6 insertions(+), 3 deletions(-) diff --git a/.nf-core.yml b/.nf-core.yml index cfe39173..678bf4f9 100644 --- a/.nf-core.yml +++ b/.nf-core.yml @@ -3,4 +3,7 @@ lint: files_unchanged: - .github/ISSUE_TEMPLATE/bug_report.yml - pyproject.toml + - .github/workflows/branch.yml + - .github/workflows/linting_comment.yml + - .github/workflows/linting.yml multiqc_config: false diff --git a/subworkflows/nf-core/utils_nextflow_pipeline/tests/main.function.nf.test b/subworkflows/nf-core/utils_nextflow_pipeline/tests/main.function.nf.test index 8ed4310c..68718e4f 100644 --- a/subworkflows/nf-core/utils_nextflow_pipeline/tests/main.function.nf.test +++ b/subworkflows/nf-core/utils_nextflow_pipeline/tests/main.function.nf.test @@ -51,4 +51,4 @@ nextflow_function { ) } } -} \ No newline at end of file +} diff --git a/subworkflows/nf-core/utils_nextflow_pipeline/tests/nextflow.config b/subworkflows/nf-core/utils_nextflow_pipeline/tests/nextflow.config index 
53574ffe..d0a926bf 100644 --- a/subworkflows/nf-core/utils_nextflow_pipeline/tests/nextflow.config +++ b/subworkflows/nf-core/utils_nextflow_pipeline/tests/nextflow.config @@ -6,4 +6,4 @@ manifest { nextflowVersion = '!>=23.04.0' version = '9.9.9' doi = 'https://doi.org/10.5281/zenodo.5070524' -} \ No newline at end of file +} diff --git a/subworkflows/nf-core/utils_nfvalidation_plugin/tests/main.nf.test b/subworkflows/nf-core/utils_nfvalidation_plugin/tests/main.nf.test index 517ee54e..5784a33f 100644 --- a/subworkflows/nf-core/utils_nfvalidation_plugin/tests/main.nf.test +++ b/subworkflows/nf-core/utils_nfvalidation_plugin/tests/main.nf.test @@ -197,4 +197,4 @@ nextflow_workflow { ) } } -} \ No newline at end of file +} From 289e962e63f69ba3ccfbc411f5c8cec064ddf227 Mon Sep 17 00:00:00 2001 From: JoseEspinosa Date: Thu, 29 Feb 2024 22:53:18 +0100 Subject: [PATCH 18/20] Fix error --- workflows/alphafold2.nf | 1 - 1 file changed, 1 deletion(-) diff --git a/workflows/alphafold2.nf b/workflows/alphafold2.nf index c03fd469..43752a8f 100644 --- a/workflows/alphafold2.nf +++ b/workflows/alphafold2.nf @@ -116,7 +116,6 @@ workflow ALPHAFOLD2 { ch_pdb_seqres, ch_uniprot ) - ch_multiqc_rep = RUN_ALPHAFOLD2_MSA.out.multiqc.collect() ch_versions = ch_versions.mix(RUN_ALPHAFOLD2_MSA.out.versions) RUN_ALPHAFOLD2_PRED ( From 4d2dc6b7be207e7a5526103fa4ab89f5062c6c3a Mon Sep 17 00:00:00 2001 From: JoseEspinosa Date: Thu, 29 Feb 2024 23:08:01 +0100 Subject: [PATCH 19/20] Make nf-core lint happy --- .nf-core.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.nf-core.yml b/.nf-core.yml index 678bf4f9..b67677ae 100644 --- a/.nf-core.yml +++ b/.nf-core.yml @@ -6,4 +6,6 @@ lint: - .github/workflows/branch.yml - .github/workflows/linting_comment.yml - .github/workflows/linting.yml + - .github/CONTRIBUTING.md + - .github/PULL_REQUEST_TEMPLATE.md multiqc_config: false From 663fa135905b3f87ba881acdea2a712d1157ac44 Mon Sep 17 00:00:00 2001 From: JoseEspinosa Date: Fri, 1 Mar 2024 10:48:52 +0100 Subject: [PATCH 20/20] Fix changelog --- CHANGELOG.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 4cbd1838..1b017b4f 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -25,7 +25,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 - [[#113](https://github.com/nf-core/proteinfold/pull/113)] - Include esmfold dbs for full data sets. - [[PR #114](https://github.com/nf-core/rnaseq/pull/114)] - Update paths to test dbs. - [[PR #117](https://github.com/nf-core/proteinfold/pull/117)] - Update pipeline template to [nf-core/tools 2.10](https://github.com/nf-core/tools/releases/tag/2.10). -- [PR #130](https://github.com/nf-core/proteinfold/pull/130) - Remove `lib/` directory. +- [[PR #132](https://github.com/nf-core/proteinfold/pull/132)] - Remove `lib/` directory. ## 1.0.0 - White Silver Reebok