diff --git a/subworkflows/local/profiling.nf b/subworkflows/local/profiling.nf index 236ed8df..fc76a31c 100644 --- a/subworkflows/local/profiling.nf +++ b/subworkflows/local/profiling.nf @@ -18,6 +18,31 @@ include { KRAKENUNIQ_PRELOADEDKRAKENUNIQ } from '../../modules/nf include { GANON_CLASSIFY } from '../../modules/nf-core/ganon/classify/main' include { GANON_REPORT } from '../../modules/nf-core/ganon/report/main' + +// Custom Functions + +/** +* Combine profiles with their original database, then separate into two channels. +* +* The channel elements are assumed to be tuples one of [ meta, profile ], and the +* database to be of [db_key, meta, database_file]. +* +* @param ch_profile A channel containing a meta and the profiling report of a given profiler +* @param ch_database A channel containing a key, the database meta, and the database file/folders itself +* @return A multiMap'ed output channel with two sub channels, one with the profile and the other with the db +*/ +def combineProfilesWithDatabase(ch_profile, ch_database) { + +return ch_profile + .map { meta, profile -> [meta.db_name, meta, profile] } + .combine(ch_database, by: 0) + .multiMap { + key, meta, profile, db_meta, db -> + profile: [meta, profile] + db: db + } +} + workflow PROFILING { take: reads // [ [ meta ], [ reads ] ] @@ -236,18 +261,7 @@ workflow PROFILING { .filter { meta, db -> meta.tool == 'centrifuge' } .map { meta, db -> [meta.db_name, meta, db] } - // We must combine the _results_ file to get correct output - sending the report file will - // weirdly still produce valid-looking output, however the numbers are nonsense. - // Unfortunately the Centrifuge documentation for this was unclear as to _which_ outfile - // goes into it. 
- ch_input_for_centrifuge_kreport = CENTRIFUGE_CENTRIFUGE.out.results - .map { meta, profile -> [meta.db_name, meta, profile] } - .combine(ch_database_for_centrifugekreport, by: 0) - .multiMap { - key, meta, profile, db_meta, db -> - profile: [meta, profile] - db: db - } + ch_input_for_centrifuge_kreport = combineProfilesWithDatabase(CENTRIFUGE_CENTRIFUGE.out.results, ch_database_for_centrifugekreport) // Generate profile CENTRIFUGE_KREPORT (ch_input_for_centrifuge_kreport.profile, ch_input_for_centrifuge_kreport.db) @@ -290,15 +304,7 @@ workflow PROFILING { .filter { meta, db -> meta.tool == 'kaiju' } .map { meta, db -> [meta.db_name, meta, db] } - ch_input_for_kaiju2table = KAIJU_KAIJU.out.results - .map { meta, profile -> [meta.db_name, meta, profile] } - .combine(ch_database_for_kaiju2table, by: 0) - .multiMap { - key, meta, profile, db_meta, db -> - profile: [meta, profile] - db: db - } - + ch_input_for_kaiju2table = combineProfilesWithDatabase(KAIJU_KAIJU.out.results, ch_database_for_kaiju2table) // Generate profile KAIJU_KAIJU2TABLE_SINGLE ( ch_input_for_kaiju2table.profile, ch_input_for_kaiju2table.db, params.kaiju_taxon_rank) ch_versions = ch_versions.mix( KAIJU_KAIJU2TABLE_SINGLE.out.versions ) diff --git a/subworkflows/local/standardisation_profiles.nf b/subworkflows/local/standardisation_profiles.nf index 3f28996e..062417c1 100644 --- a/subworkflows/local/standardisation_profiles.nf +++ b/subworkflows/local/standardisation_profiles.nf @@ -12,6 +12,30 @@ include { METAPHLAN_MERGEMETAPHLANTABLES include { MOTUS_MERGE } from '../../modules/nf-core/motus/merge/main' include { GANON_TABLE } from '../../modules/nf-core/ganon/table/main' +// Custom Functions + +/** +* Combine profiles with their original database, then separate into two channels. +* +* The channel elements are assumed to be tuples one of [ meta, profile ], and the +* database to be of [db_key, meta, database_file]. 
+* +* @param ch_profile A channel containing a meta and the profiling report of a given profiler +* @param ch_database A channel containing a key, the database meta, and the database file/folders itself +* @return A multiMap'ed output channel with two sub channels, one with the profile and the other with the db +*/ +def combineProfilesWithDatabase(ch_profile, ch_database) { + +return ch_profile + .map { meta, profile -> [meta.id, meta, profile] } + .combine(ch_database, by: 0) + .multiMap { + key, meta, profile, db_meta, db -> + profile: [meta, profile] + db: db + } +} + workflow STANDARDISATION_PROFILES { take: classifications @@ -117,14 +141,7 @@ workflow STANDARDISATION_PROFILES { [[id:it[0]], it[1]] } - ch_input_for_kaiju2tablecombine = ch_profiles_for_kaiju - .map { meta, profile -> [meta.id, meta, profile] } - .combine(ch_input_databases.kaiju.map{meta, db -> [meta.db_name, meta, db]}, by: 0) - .multiMap { - key, meta, profile, db_meta, db -> - profile: [meta, profile] - db: db - } + ch_input_for_kaiju2tablecombine = combineProfilesWithDatabase(ch_profiles_for_kaiju, ch_input_databases.kaiju.map{meta, db -> [meta.db_name, meta, db]}) KAIJU_KAIJU2TABLE_COMBINED ( ch_input_for_kaiju2tablecombine.profile, ch_input_for_kaiju2tablecombine.db, params.kaiju_taxon_rank) ch_multiqc_files = ch_multiqc_files.mix( KAIJU_KAIJU2TABLE_COMBINED.out.summary ) @@ -172,14 +189,7 @@ workflow STANDARDISATION_PROFILES { [[id:it[0]], it[1]] } - ch_input_for_motusmerge = ch_profiles_for_motus - .map { meta, profile -> [meta.id, meta, profile] } - .combine(ch_input_databases.motus.map{meta, db -> [meta.db_name, meta, db]}, by: 0) - .multiMap { - key, meta, profile, db_meta, db -> - profile: [meta, profile] - db: db - } + ch_input_for_motusmerge = combineProfilesWithDatabase(ch_profiles_for_motus, ch_input_databases.motus.map{meta, db -> [meta.db_name, meta, db]}) MOTUS_MERGE ( ch_input_for_motusmerge.profile, ch_input_for_motusmerge.db, motu_version ) ch_versions = ch_versions.mix( MOTUS_MERGE.out.versions )