diff --git a/subworkflows/local/ancestral_gene.nf b/subworkflows/local/ancestral_gene.nf
index 2bb6dd5d..866098b3 100755
--- a/subworkflows/local/ancestral_gene.nf
+++ b/subworkflows/local/ancestral_gene.nf
@@ -10,10 +10,10 @@ include { UCSC_BEDTOBIGBED } from '../../modules/nf-core/ucsc/bedtobigbed/main

 workflow ANCESTRAL_GENE {
     take:
-    busco_dir // Channel: [val(meta),/path/to/busco/output/dir]
-    dot_genome // Channel: [val(meta), [ datafile ]]
-    buscogene_as // Channel val(dot_as location)
-    ancestral_table // Channel val(ancestral_table location)
+    busco_dir // Channel: tuple [val(meta),/path/to/busco/output/dir]
+    dot_genome // Channel: tuple [val(meta), [ datafile ]]
+    buscogene_as // Channel: val(dot_as location)
+    ancestral_table // Channel: val(ancestral_table location)

     main:
     ch_versions = Channel.empty()
diff --git a/subworkflows/local/busco_annotation.nf b/subworkflows/local/busco_annotation.nf
index 5a317ee5..1d41fcf1 100755
--- a/subworkflows/local/busco_annotation.nf
+++ b/subworkflows/local/busco_annotation.nf
@@ -21,12 +21,12 @@ include { ANCESTRAL_GENE } from './ancestral_gene'

 workflow BUSCO_ANNOTATION {
     take:
-    dot_genome // channel: [val(meta), [ datafile ]]
-    reference_tuple // channel: [val(meta), [ datafile ]]
-    lineageinfo // channel: val(lineage_db)
-    lineagespath // channel: val(/path/to/buscoDB)
-    buscogene_as // channel: val(dot_as location)
-    ancestral_table // channel: val(ancestral_table location)
+    dot_genome // Channel: tuple [val(meta), [ datafile ]]
+    reference_tuple // Channel: tuple [val(meta), [ datafile ]]
+    lineageinfo // Channel: val(lineage_db)
+    lineagespath // Channel: val(/path/to/buscoDB)
+    buscogene_as // Channel: val(dot_as location)
+    ancestral_table // Channel: val(ancestral_table location)

     main:
     ch_versions = Channel.empty()
diff --git a/subworkflows/local/gap_finder.nf b/subworkflows/local/gap_finder.nf
index 3c51e530..f175fbd0 100755
--- a/subworkflows/local/gap_finder.nf
+++ b/subworkflows/local/gap_finder.nf
@@ -9,8 +9,8 @@ include { TABIX_BGZIPTABIX } from '../../modules/nf-core/tabix/bgziptabix/main'

 workflow GAP_FINDER {
     take:
-    reference_tuple // Channel [ val(meta), path(fasta) ]
-    max_scaff_size // val(size of largest scaffold in bp)
+    reference_tuple // Channel: tuple [ val(meta), path(fasta) ]
+    max_scaff_size // Channel: val(size of largest scaffold in bp)

     main:
     ch_versions = Channel.empty()
diff --git a/subworkflows/local/gene_alignment.nf b/subworkflows/local/gene_alignment.nf
index fcd6bcd3..51073c03 100755
--- a/subworkflows/local/gene_alignment.nf
+++ b/subworkflows/local/gene_alignment.nf
@@ -15,15 +15,15 @@ include { NUC_ALIGNMENTS as CDS_ALIGNMENTS } from './nuc_alignments'

 workflow GENE_ALIGNMENT {
     take:
-    dot_genome // Channel [ val(meta), path(file) ]
-    reference_tuple // Channel [ val(meta), path(file) ]
-    reference_index // Channel [ val(meta), path(file) ]
-    max_scaff_size // Channel val(size of largest scaffold in bp)
-    alignment_datadir // Channel val(geneset_dir)
-    alignment_genesets // Channel val(geneset_id)
-    alignment_common // Channel val(common_name) // Not yet in use
-    intron_size // Channel val(50k)
-    as_files // Channel [ val(meta), path(file) ]
+    dot_genome // Channel: [ val(meta), path(file) ]
+    reference_tuple // Channel: [ val(meta), path(file) ]
+    reference_index // Channel: [ val(meta), path(file) ]
+    max_scaff_size // Channel: val(size of largest scaffold in bp)
+    alignment_datadir // Channel: val(geneset_dir)
+    alignment_genesets // Channel: val(geneset_id)
+    alignment_common // Channel: val(common_name) // Not yet in use
+    intron_size // Channel: val(50k)
+    as_files // Channel: [ val(meta), path(file) ]

     main:
     ch_versions = Channel.empty()
diff --git a/subworkflows/local/generate_genome.nf b/subworkflows/local/generate_genome.nf
index ea8c4723..01eded6a 100755
--- a/subworkflows/local/generate_genome.nf
+++ b/subworkflows/local/generate_genome.nf
@@ -9,7 +9,7 @@ include { GET_LARGEST_SCAFF } from '../../modules/local/get_largest_scaff'

 workflow GENERATE_GENOME {
     take:
-    reference_file // Channel path(file)
+    reference_file // Channel: path(file)

     main:
     ch_versions = Channel.empty()
diff --git a/subworkflows/local/hic_mapping.nf b/subworkflows/local/hic_mapping.nf
index 0314bca9..084a73a4 100755
--- a/subworkflows/local/hic_mapping.nf
+++ b/subworkflows/local/hic_mapping.nf
@@ -28,17 +28,17 @@ include { PRETEXT_INGESTION as PRETEXT_INGEST_HIRES } from '../../subworkflows/l

 workflow HIC_MAPPING {
     take:
-    reference_tuple // Channel [ val(meta), path( file ) ]
-    reference_index // Channel [ val(meta), path( file ) ]
-    dot_genome // Channel [ val(meta), path( datafile ) ]
-    hic_reads_path // Channel [ val(meta), path( directory ) ]
-    assembly_id // Channel val( id )
-    gap_file // Channel [ val(meta), path( file ) ]
-    coverage_file // Channel [ val(meta), path( file ) ]
-    logcoverage_file // Channel [ val(meta), path( file ) ]
-    telo_file // Channel [ val(meta), path( file ) ]
-    repeat_density_file // Channel [ val(meta), path( file ) ]
-    workflow_setting // val( {RAPID | FULL } )
+    reference_tuple // Channel: tuple [ val(meta), path( file ) ]
+    reference_index // Channel: tuple [ val(meta), path( file ) ]
+    dot_genome // Channel: tuple [ val(meta), path( datafile ) ]
+    hic_reads_path // Channel: tuple [ val(meta), path( directory ) ]
+    assembly_id // Channel: val( id )
+    gap_file // Channel: tuple [ val(meta), path( file ) ]
+    coverage_file // Channel: tuple [ val(meta), path( file ) ]
+    logcoverage_file // Channel: tuple [ val(meta), path( file ) ]
+    telo_file // Channel: tuple [ val(meta), path( file ) ]
+    repeat_density_file // Channel: tuple [ val(meta), path( file ) ]
+    workflow_setting // Channel: val( { RAPID | FULL } )

     main:
     ch_versions = Channel.empty()
diff --git a/subworkflows/local/insilico_digest.nf b/subworkflows/local/insilico_digest.nf
index c338fb40..fe6c0f46 100755
--- a/subworkflows/local/insilico_digest.nf
+++ b/subworkflows/local/insilico_digest.nf
@@ -14,10 +14,10 @@ include { UCSC_BEDTOBIGBED } from '../../modules/nf-core/ucsc/bedto

 workflow INSILICO_DIGEST {
     take:
-    sizefile // Channel [ val(meta), path(my.genome_file) ]
-    reference // Channel [ val(meta), path(reference_file) ]
-    ch_enzyme // Channel val( "bspq1","bsss1","DLE1" )
-    dot_as // Channel val(dot_as location)
+    sizefile // Channel: tuple [ val(meta), path(my.genome_file) ]
+    reference // Channel: tuple [ val(meta), path(reference_file) ]
+    ch_enzyme // Channel: val( "bspq1","bsss1","DLE1" )
+    dot_as // Channel: val(dot_as location)

     main:
     ch_versions = Channel.empty()
diff --git a/subworkflows/local/kmer.nf b/subworkflows/local/kmer.nf
index 45d885f2..fd9d40be 100755
--- a/subworkflows/local/kmer.nf
+++ b/subworkflows/local/kmer.nf
@@ -55,15 +55,6 @@ workflow KMER {
         }
         .set{ ch_reads_merged }

-    //
-    // LOGIC: PREPARE FASTK INPUT
-    //
-    CAT_CAT.out.file_out
-        .join( ch_reads_merged )
-        .map{ meta, reads_old, reads_new ->
-            reads_old.renameTo( reads_new );
-        }
-
     //
     // MODULE: COUNT KMERS
     //
@@ -74,10 +65,10 @@
     // LOGIC: PREPARE MERQURYFK INPUT
     //
     FASTK_FASTK.out.hist
-        .combine(FASTK_FASTK.out.ktab)
-        .combine(reference_tuple)
+        .combine( FASTK_FASTK.out.ktab )
+        .combine( reference_tuple )
         .map{ meta_hist, hist, meta_ktab, ktab, meta_ref, primary ->
-            tuple( meta_hist, hist, ktab, primary, [])
+            tuple( meta_hist, hist, ktab, primary, [] )
         }
         .set{ ch_merq }

@@ -85,7 +76,7 @@
     // MODULE: USE KMER HISTOGRAM TO PRODUCE SPECTRA
     //
     MERQURYFK_MERQURYFK ( ch_merq )
-    ch_versions = ch_versions.mix(MERQURYFK_MERQURYFK.out.versions.first())
+    ch_versions = ch_versions.mix( MERQURYFK_MERQURYFK.out.versions.first() )

     emit:
     merquryk_completeness = MERQURYFK_MERQURYFK.out.stats // meta, stats
@@ -98,10 +89,10 @@ process GrabFiles {
     executor 'local'

     input:
-    tuple val(meta), path("in")
+    tuple val( meta ), path( "in" )

     output:
-    tuple val(meta), path("in/*.fasta.gz")
+    tuple val( meta ), path( "in/*.fasta.gz" )

     "true"
 }
diff --git a/subworkflows/local/longread_coverage.nf b/subworkflows/local/longread_coverage.nf
index 70a8e92d..89d8c21b 100755
--- a/subworkflows/local/longread_coverage.nf
+++ b/subworkflows/local/longread_coverage.nf
@@ -26,9 +26,9 @@ include { LONGREADCOVERAGESCALELOG2 } from '../../modules/local/

 workflow LONGREAD_COVERAGE {
     take:
-    reference_tuple // Channel: [ val(meta), file( reference_file ) ]
-    dot_genome // Channel: [ val(meta), [ file( datafile ) ] ]
-    reads_path // Channel: [ val(meta), val( str ) ]
+    reference_tuple // Channel: tuple [ val(meta), file( reference_file ) ]
+    dot_genome // Channel: tuple [ val(meta), [ file( datafile ) ] ]
+    reads_path // Channel: tuple [ val(meta), val( str ) ]

     main:
     ch_versions = Channel.empty()
diff --git a/subworkflows/local/nuc_alignments.nf b/subworkflows/local/nuc_alignments.nf
index 8d13b8f1..75fc045b 100755
--- a/subworkflows/local/nuc_alignments.nf
+++ b/subworkflows/local/nuc_alignments.nf
@@ -19,11 +19,11 @@ include { PUNCHLIST } from './punchlist'

 workflow NUC_ALIGNMENTS {
     take:
-    reference_tuple // Channel [ val(meta), path(file) ]
-    reference_index // Channel [ val(meta), path(file) ]
-    nuc_files // Channel [ val(meta), path(file) ]
-    dot_genome // Channel [ val(meta), path(file) ]
-    intron_size // Channel val(50k)
+    reference_tuple // Channel: tuple [ val(meta), path(file) ]
+    reference_index // Channel: tuple [ val(meta), path(file) ]
+    nuc_files // Channel: tuple [ val(meta), path(file) ]
+    dot_genome // Channel: tuple [ val(meta), path(file) ]
+    intron_size // Channel: val(50k)

     main:
     ch_versions = Channel.empty()
diff --git a/subworkflows/local/pep_alignments.nf b/subworkflows/local/pep_alignments.nf
index 8f12e91e..fca37eb2 100755
--- a/subworkflows/local/pep_alignments.nf
+++ b/subworkflows/local/pep_alignments.nf
@@ -12,9 +12,9 @@ include { EXTRACT_COV_IDEN } from '../../modules/local/extract_cov_iden'

 workflow PEP_ALIGNMENTS {
     take:
-    reference_tuple // Channel [ val(meta), path(file) ]
-    pep_files // Channel [ val(meta), path(file) ]
-    max_scaff_size // Channel val(size of largest scaffold in bp)
+    reference_tuple // Channel: tuple [ val(meta), path(file) ]
+    pep_files // Channel: tuple [ val(meta), path(file) ]
+    max_scaff_size // Channel: val(size of largest scaffold in bp)

     main:
     ch_versions = Channel.empty()
diff --git a/subworkflows/local/pretext_ingestion.nf b/subworkflows/local/pretext_ingestion.nf
index 95ecb3ab..bbd9921c 100644
--- a/subworkflows/local/pretext_ingestion.nf
+++ b/subworkflows/local/pretext_ingestion.nf
@@ -2,12 +2,12 @@ include { PRETEXT_GRAPH } from '../../modules/lo

 workflow PRETEXT_INGESTION {
     take:
-    pretext_file // tuple([sample_id], file)
-    gap_file // tuple([sample_id], file)
-    coverage_file // tuple([sample_id], file)
-    cov_log_file // tuple([sample_id], file)
-    telomere_file // tuple([sample_id], file)
-    repeat_cov_file // tuple([sample_id], file)
+    pretext_file // Channel: tuple([sample_id], file)
+    gap_file // Channel: tuple([sample_id], file)
+    coverage_file // Channel: tuple([sample_id], file)
+    cov_log_file // Channel: tuple([sample_id], file)
+    telomere_file // Channel: tuple([sample_id], file)
+    repeat_cov_file // Channel: tuple([sample_id], file)

     main:

diff --git a/subworkflows/local/punchlist.nf b/subworkflows/local/punchlist.nf
index 1db01a6e..954acae3 100755
--- a/subworkflows/local/punchlist.nf
+++ b/subworkflows/local/punchlist.nf
@@ -8,8 +8,8 @@ include { PAF2BED } from '../../modules/local/paf_to_bed'

 workflow PUNCHLIST {
     take:
-    reference_tuple // Channel [ val(meta), path(reference)]
-    merged_bam // Channel [ val(meta), path(bam_file)]
+    reference_tuple // Channel: tuple [ val(meta), path(reference)]
+    merged_bam // Channel: tuple [ val(meta), path(bam_file)]

     main:
     ch_versions = Channel.empty()
diff --git a/subworkflows/local/repeat_density.nf b/subworkflows/local/repeat_density.nf
index 52744f56..ecd2a37a 100755
--- a/subworkflows/local/repeat_density.nf
+++ b/subworkflows/local/repeat_density.nf
@@ -19,7 +19,7 @@ include { REPLACE_DOTS } from '../../modules/local/replace_

 workflow REPEAT_DENSITY {
     take:
-    reference_tuple // Channel [ val(meta), path(file) ]
+    reference_tuple // Channel: tuple [ val(meta), path(file) ]
     dot_genome

     main:
diff --git a/subworkflows/local/selfcomp.nf b/subworkflows/local/selfcomp.nf
index 985d9c6c..967e9eeb 100755
--- a/subworkflows/local/selfcomp.nf
+++ b/subworkflows/local/selfcomp.nf
@@ -18,11 +18,11 @@ include { BEDTOOLS_MERGE } from '../../modules/nf-core/bedtools/

 workflow SELFCOMP {
     take:
-    reference_tuple // Channel [ val(meta), path(reference_file) ]
-    dot_genome // Channel [ val(meta), [ path(datafile) ] ]
-    mummer_chunk // Channel val( int )
-    motif_len // Channel val( int )
-    selfcomp_as // Channel val( dot_as location )
+    reference_tuple // Channel: tuple [ val(meta), path(reference_file) ]
+    dot_genome // Channel: tuple [ val(meta), [ path(datafile) ] ]
+    mummer_chunk // Channel: val( int )
+    motif_len // Channel: val( int )
+    selfcomp_as // Channel: val( dot_as location )

     main:
     ch_versions = Channel.empty()
diff --git a/subworkflows/local/synteny.nf b/subworkflows/local/synteny.nf
index 49bb0fee..c78dbda7 100755
--- a/subworkflows/local/synteny.nf
+++ b/subworkflows/local/synteny.nf
@@ -8,8 +8,8 @@ include { GET_SYNTENY_GENOMES } from '../../modules/local/get_synteny_genomes'

 workflow SYNTENY {
     take:
-    reference_tuple // Channel [ val(meta), path(file) ]
-    synteny_path // Channel val(meta)
+    reference_tuple // Channel: tuple [ val(meta), path(file) ]
+    synteny_path // Channel: val(meta)

     main:
     ch_versions = Channel.empty()
diff --git a/subworkflows/local/telo_finder.nf b/subworkflows/local/telo_finder.nf
index 36f83d46..476a74af 100755
--- a/subworkflows/local/telo_finder.nf
+++ b/subworkflows/local/telo_finder.nf
@@ -11,8 +11,8 @@ include { TABIX_BGZIPTABIX } from '../../modules/nf-core/tabix/bgziptab

 workflow TELO_FINDER {
     take:
-    max_scaff_size // val(size of largest scaffold in bp)
-    reference_tuple // Channel [ val(meta), path(fasta) ]
+    max_scaff_size // Channel: val(size of largest scaffold in bp)
+    reference_tuple // Channel: tuple [ val(meta), path(fasta) ]
     teloseq

     main:
diff --git a/subworkflows/local/yaml_input.nf b/subworkflows/local/yaml_input.nf
index 9ba195ce..3b1c882e 100755
--- a/subworkflows/local/yaml_input.nf
+++ b/subworkflows/local/yaml_input.nf
@@ -4,7 +4,7 @@ import org.yaml.snakeyaml.Yaml

 workflow YAML_INPUT {
     take:
-    input_file // input_yaml_from_commandline
+    input_file // params.input

     main:
     ch_versions = Channel.empty()
diff --git a/workflows/treeval.nf b/workflows/treeval.nf
index 8ad1b30a..e4b0bcee 100755
--- a/workflows/treeval.nf
+++ b/workflows/treeval.nf
@@ -164,10 +164,10 @@ workflow TREEVAL {
     )
     ch_versions = ch_versions.mix(GAP_FINDER.out.versions)

-    // //
-    // // SUBWORKFLOW: Takes reference file, .genome file, mummer variables, motif length variable and as
-    // // file to generate a file containing sites of self-complementary sequnce.
-    // //
+    //
+    // SUBWORKFLOW: Takes reference file, .genome file, mummer variables, motif length variable and as
+    // file to generate a file containing sites of self-complementary sequence.
+    //
     SELFCOMP (
         YAML_INPUT.out.reference,
         GENERATE_GENOME.out.dot_genome,