diff --git a/assets/schema_input.json b/assets/schema_input.json
index 7068f08..2097a0c 100644
--- a/assets/schema_input.json
+++ b/assets/schema_input.json
@@ -14,25 +14,12 @@
                 "unique": true,
                 "errorMessage": "Sample name must be provided and cannot contain spaces"
             },
-            "fastq_1": {
+            "profile": {
                 "type": "string",
-                "pattern": "^\\S+\\.f(ast)?q(\\.gz)?$",
-                "errorMessage": "FastQ file for reads 1 must be provided, cannot contain spaces and must have the extension: '.fq', '.fastq', '.fq.gz' or '.fastq.gz'"
-            },
-            "fastq_2": {
-                "errorMessage": "FastQ file for reads 2 cannot contain spaces and must have the extension: '.fq', '.fastq', '.fq.gz' or '.fastq.gz'",
-                "anyOf": [
-                    {
-                        "type": "string",
-                        "pattern": "^\\S+\\.f(ast)?q(\\.gz)?$"
-                    },
-                    {
-                        "type": "string",
-                        "maxLength": 0
-                    }
-                ]
+                "pattern": "^\\S+\\.json(\\.gz)?$",
+                "errorMessage": "JSON profile must be provided, cannot contain spaces and must have the extension: '.json' or '.json.gz'"
             }
         },
-        "required": ["sample", "fastq_1"]
+        "required": ["sample", "profile"]
     }
 }
diff --git a/conf/modules.config b/conf/modules.config
index 1fbf485..08fc284 100644
--- a/conf/modules.config
+++ b/conf/modules.config
@@ -16,6 +16,8 @@ process {
     assembly_directory_name = "assembly"
     summary_directory_name = "summary"

+    locidex_merge_directory_name = [params.outdir, "locidex", "merge"].join(File.separator)
+
     publishDir = [
         path: { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" },
         mode: params.publish_dir_mode,
@@ -54,6 +56,15 @@ process {
         ]
     }

+    withName: LOCIDEX_MERGE {
+        publishDir = [
+            path: locidex_merge_directory_name,
+            mode: params.publish_dir_mode,
+            pattern: "*/*",
+            saveAs: { filename -> filename.equals('versions.yml') ? null : filename }
+        ]
+    }
+
     withName: CUSTOM_DUMPSOFTWAREVERSIONS {
         publishDir = [
             path: { "${params.outdir}/pipeline_info" },
diff --git a/main.nf b/main.nf
index 466b965..a393932 100644
--- a/main.nf
+++ b/main.nf
@@ -17,21 +17,12 @@ nextflow.enable.dsl = 2

 include { validateParameters; paramsHelp; paramsSummaryLog; fromSamplesheet } from 'plugin/nf-validation'

-// Print help message if needed
-if (params.help) {
-    def logo = NfcoreTemplate.logo(workflow, params.monochrome_logs)
-    def citation = '\n' + WorkflowMain.citation(workflow) + '\n'
-    def String command = "nextflow run ${workflow.manifest.name} --input samplesheet.csv --genome GRCh37 -profile docker"
-    log.info logo + paramsHelp(command) + citation + NfcoreTemplate.dashedLine(params.monochrome_logs)
-    System.exit(0)
-}
-
 // Validate input parameters
 if (params.validate_params) {
     validateParameters()
 }

-WorkflowMain.initialise(workflow, params, log, args)
+

 /*
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
@@ -39,7 +30,7 @@ WorkflowMain.initialise(workflow, params, log, args)
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 */

-include { GAS_NOMENCLATURE } from './workflows/gasnomenclature'
+include { GAS_NOMENCLATURE } from './workflows/gas_nomenclature'

 //
 // WORKFLOW: Run main phac-nml/gasnomenclature analysis pipeline
diff --git a/modules/local/locidex/merge/main.nf b/modules/local/locidex/merge/main.nf
new file mode 100644
index 0000000..bd9f3e8
--- /dev/null
+++ b/modules/local/locidex/merge/main.nf
@@ -0,0 +1,28 @@
+// Merge missing loci
+
+process LOCIDEX_MERGE {
+    tag 'Merge Profiles'
+    label 'process_medium'
+
+    container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
+        'https://depot.galaxyproject.org/singularity/locidex:0.1.1--pyhdfd78af_0' :
+        'quay.io/biocontainers/locidex:0.1.1--pyhdfd78af_0' }"
+
+    input:
+    val input_values // [file(sample1), file(sample2), file(sample3), etc...]
+
+    output:
+    path("${combined_dir}/*.tsv"), emit: combined_profiles
+    path("${combined_dir}/*.json"), emit: report
+    path "versions.yml", emit: versions
+
+    script:
+    combined_dir = "merged"
+    """
+    locidex merge -i ${input_values.join(' ')} -o ${combined_dir}
+    cat <<-END_VERSIONS > versions.yml
+    "${task.process}":
+        locidex merge: \$(echo \$(locidex search -V 2>&1) | sed 's/^.*locidex //' )
+    END_VERSIONS
+    """
+}
diff --git a/workflows/gas_nomenclature.nf b/workflows/gas_nomenclature.nf
index 8746968..8476fc0 100644
--- a/workflows/gas_nomenclature.nf
+++ b/workflows/gas_nomenclature.nf
@@ -6,15 +6,6 @@
 include { paramsSummaryLog; paramsSummaryMap; fromSamplesheet } from 'plugin/nf-validation'

-def logo = NfcoreTemplate.logo(workflow, params.monochrome_logs)
-def citation = '\n' + WorkflowMain.citation(workflow) + '\n'
-def summary_params = paramsSummaryMap(workflow)
-
-// Print parameter summary log to screen
-log.info logo + paramsSummaryLog(workflow) + citation
-
-WorkflowIridanextexample.initialise(params, log)
-
 /*
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
     CONFIG FILES
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 */
@@ -30,7 +21,7 @@ WorkflowIridanextexample.initialise(params, log)
 //
 // SUBWORKFLOW: Consisting of a mix of local and nf-core/modules
 //
-include { INPUT_CHECK } from '../subworkflows/local/input_check'
+
 include { GENERATE_SAMPLE_JSON } from '../modules/local/generatesamplejson/main'
 include { SIMPLIFY_IRIDA_JSON } from '../modules/local/simplifyiridajson/main'
 include { IRIDA_NEXT_OUTPUT } from '../modules/local/iridanextoutput/main'
@@ -47,6 +38,7 @@ include { GENERATE_SUMMARY } from '../modules/local/generatesummary/main'
 // MODULE: Installed directly from nf-core/modules
 //
 include { CUSTOM_DUMPSOFTWAREVERSIONS } from '../modules/nf-core/custom/dumpsoftwareversions/main'
+include { LOCIDEX_MERGE } from "../modules/local/locidex/merge/main"

 /*
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
@@ -60,45 +52,44 @@ workflow GAS_NOMENCLATURE {

     // Create a new channel of metadata from a sample sheet
     // NB: `input` corresponds to `params.input` and associated sample sheet schema
-    input = Channel.fromSamplesheet("input")
-        // Map the inputs so that they conform to the nf-core-expected "reads" format.
-        // Either [meta, [fastq_1]] or [meta, [fastq_1, fastq_2]] if fastq_2 exists
-        .map { meta, fastq_1, fastq_2 ->
-            fastq_2 ? tuple(meta, [ file(fastq_1), file(fastq_2) ]) :
-                tuple(meta, [ file(fastq_1) ])}
-
-    ASSEMBLY_STUB (
-        input
-    )
-    ch_versions = ch_versions.mix(ASSEMBLY_STUB.out.versions)
+    input = Channel.fromSamplesheet("input").map { meta, profile -> tuple(meta, file(profile)) }
+    profiles = input.map {
+        it -> it[1]
+    }.collect()
+    LOCIDEX_MERGE(profiles)
+
+    //ASSEMBLY_STUB (
+    //    input
+    //)
+    //ch_versions = ch_versions.mix(ASSEMBLY_STUB.out.versions)

     // A channel of tuples of ({meta}, [read[0], read[1]], assembly)
-    ch_tuple_read_assembly = input.join(ASSEMBLY_STUB.out.assembly)
-
-    GENERATE_SAMPLE_JSON (
-        ch_tuple_read_assembly
-    )
-    ch_versions = ch_versions.mix(GENERATE_SAMPLE_JSON.out.versions)
-
-    GENERATE_SUMMARY (
-        ch_tuple_read_assembly.collect{ [it] }
-    )
-    ch_versions = ch_versions.mix(GENERATE_SUMMARY.out.versions)
-
-    SIMPLIFY_IRIDA_JSON (
-        GENERATE_SAMPLE_JSON.out.json
-    )
-    ch_versions = ch_versions.mix(SIMPLIFY_IRIDA_JSON.out.versions)
-    ch_simplified_jsons = SIMPLIFY_IRIDA_JSON.out.simple_json.map { meta, data -> data }.collect() // Collect JSONs
-
-    IRIDA_NEXT_OUTPUT (
-        samples_data=ch_simplified_jsons
-    )
-    ch_versions = ch_versions.mix(IRIDA_NEXT_OUTPUT.out.versions)
-
-    CUSTOM_DUMPSOFTWAREVERSIONS (
-        ch_versions.unique().collectFile(name: 'collated_versions.yml')
-    )
+    //ch_tuple_read_assembly = input.join(ASSEMBLY_STUB.out.assembly)
+
+    //GENERATE_SAMPLE_JSON (
+    //    ch_tuple_read_assembly
+    //)
+    //ch_versions = ch_versions.mix(GENERATE_SAMPLE_JSON.out.versions)
+
+    //GENERATE_SUMMARY (
+    //    ch_tuple_read_assembly.collect{ [it] }
+    //)
+    //ch_versions = ch_versions.mix(GENERATE_SUMMARY.out.versions)
+
+    //SIMPLIFY_IRIDA_JSON (
+    //    GENERATE_SAMPLE_JSON.out.json
+    //)
+    //ch_versions = ch_versions.mix(SIMPLIFY_IRIDA_JSON.out.versions)
+    //ch_simplified_jsons = SIMPLIFY_IRIDA_JSON.out.simple_json.map { meta, data -> data }.collect() // Collect JSONs
+
+    //IRIDA_NEXT_OUTPUT (
+    //    samples_data=ch_simplified_jsons
+    //)
+    //ch_versions = ch_versions.mix(IRIDA_NEXT_OUTPUT.out.versions)
+
+    //CUSTOM_DUMPSOFTWAREVERSIONS (
+    //    ch_versions.unique().collectFile(name: 'collated_versions.yml')
+    //)
 }
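
Note on usage: with the schema change above, the samplesheet passed to --input now needs a "sample" column and a "profile" column, where "profile" points to an allele profile in JSON format (plain or gzipped, matching the pattern ^\S+\.json(\.gz)?$). A minimal sketch of such a samplesheet, with hypothetical sample IDs and file paths, would be:

    sample,profile
    sampleA,/path/to/sampleA.json
    sampleB,/path/to/sampleB.json.gz

and the LOCIDEX_MERGE step could then be exercised with a command along the lines of:

    nextflow run . --input samplesheet.csv --outdir results -profile docker

The column names come from assets/schema_input.json; the paths and the run command are illustrative only.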