Skip to content
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 2 additions & 1 deletion CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,8 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0

- Addition of `params.multi_mapping` to change the level of multi-mapping filtering performed by PretextMap.
- This corresponds to the mapq (mapping quality) value.
- Updated `trace` scope to start collecting SummaryStat data again.
- Updated `trace` scope to start collecting SummaryStat data again.

### Parameters

Expand Down
6 changes: 2 additions & 4 deletions modules/local/cram/filter_align_bwamem2_fixmate_sort/main.nf
Original file line number Diff line number Diff line change
Expand Up @@ -18,9 +18,9 @@ process CRAM_FILTER_ALIGN_BWAMEM2_FIXMATE_SORT {
task.ext.when == null || task.ext.when

script:
def args = task.ext.args ?: ''
def _args = task.ext.args ?: ''
def args1 = task.ext.args1 ?: ''
def args2 = task.ext.args2 ?: ''
def _args2 = task.ext.args2 ?: ''
def args3 = task.ext.args3 ?: ''
def args4 = task.ext.args4 ?: ''
def prefix = task.ext.prefix ?: "${meta.id}"
Expand All @@ -43,8 +43,6 @@ process CRAM_FILTER_ALIGN_BWAMEM2_FIXMATE_SORT {

stub:
def prefix = task.ext.prefix ?: "${meta.id}"
def base = "45022_3#2"
def chunkid = "1"
"""
touch ${prefix}_${base}_${chunkid}_mem.bam

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,7 @@ process CRAM_FILTER_MINIMAP2_FILTER5END_FIXMATE_SORT {
task.ext.when == null || task.ext.when

script:
def args = task.ext.args ?: ''
def _args = task.ext.args ?: ''
def args1 = task.ext.args1 ?: ''
def args2 = task.ext.args2 ?: ''
def args3 = task.ext.args3 ?: ''
Expand All @@ -44,8 +44,6 @@ process CRAM_FILTER_MINIMAP2_FILTER5END_FIXMATE_SORT {

stub:
def prefix = task.ext.prefix ?: "${meta.id}"
def base = "45022_3#2"
def chunkid = "1"
"""
touch ${prefix}_${base}_${chunkid}_mm.bam

Expand Down
3 changes: 1 addition & 2 deletions modules/local/find/telomere_windows/main.nf
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,7 @@ process FIND_TELOMERE_WINDOWS {
def VERSION = "1.0" // WARN: Version information not provided by tool on CLI. Please update this string when bumping container versions.
def telomere_jar = task.ext.telomere_jar ?: ''
def telomere_jvm_params = task.ext.telomere_jvm_params ?: ''
def telomere_window_cut = task.ext.telomere_window_cut ?: 99.9
def telomere_window_cut = task.ext.telomere_window_cut ?: "99.9"
"""
java ${telomere_jvm_params} -cp ${projectDir}/bin/${telomere_jar} FindTelomereWindows $file $telomere_window_cut > ${prefix}.windows

Expand All @@ -35,7 +35,6 @@ process FIND_TELOMERE_WINDOWS {
stub:
def prefix = task.ext.prefix ?: "${meta.id}"
def VERSION = "1.0" // WARN: Version information not provided by tool on CLI. Please update this string when bumping container versions.
def telomere = task.ext.telomere ?: ''
"""
touch ${prefix}.windows

Expand Down
6 changes: 3 additions & 3 deletions modules/local/gawk_split_directions/main.nf
Original file line number Diff line number Diff line change
Expand Up @@ -23,12 +23,12 @@ process GAWK_SPLIT_DIRECTIONS {
def args = task.ext.args ?: '' // args is used for the main arguments of the tool
def args2 = task.ext.args2 ?: '' // args2 is used to specify a program when no program file has been given
prefix = task.ext.prefix ?: "${meta.id}"
suffix = task.ext.suffix ?: "${input.collect{ it.getExtension()}.get(0)}" // use the first extension of the input files
suffix = task.ext.suffix ?: "${input.collect{ file -> file.getExtension()}.get(0)}" // use the first extension of the input files

program = program_file ? "-f ${program_file}" : "${args2}"

input.collect{
assert it.name != "${prefix}.${suffix}" : "Input and output names are the same, set prefix in module configuration to disambiguate!"
input.collect{ file ->
assert file.name != "${prefix}.${suffix}" : "Input and output names are the same, set prefix in module configuration to disambiguate!"
}

"""
Expand Down
3 changes: 1 addition & 2 deletions modules/nf-core/pretextsnapshot/main.nf

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

54 changes: 27 additions & 27 deletions nextflow.config
Original file line number Diff line number Diff line change
Expand Up @@ -34,9 +34,9 @@ params {
plaintext_email = false
monochrome_logs = false
hook_url = null
help = false
help_full = false
show_hidden = false
help = false
help_full = false
show_hidden = false
version = false
validate_params = true

Expand Down Expand Up @@ -217,7 +217,7 @@ process.shell = [
// Disable process selector warnings by default. Use debug profile to enable warnings.
nextflow.enable.configProcessNamesValidation = false

def trace_timestamp = new java.util.Date().format( 'yyyy-MM-dd_HH-mm-ss' )
//def trace_timestamp = new java.util.Date().format( 'yyyy-MM-dd_HH-mm-ss' )
timeline {
enabled = true
file = "${params.outdir}/pipeline_info/execution_timeline_${params.trace_report_suffix}.html"
Expand Down Expand Up @@ -293,31 +293,31 @@ validation {
command = "nextflow run sanger-tol/curationpretext -profile <docker/singularity/.../institute> --input samplesheet.csv --outdir <OUTDIR>"
fullParameter = "help_full"
showHiddenParameter = "show_hidden"
beforeText = """
-\033[2m----------------------------------------------------\033[0m-
\033[0;34m _____ \033[0;32m _______ \033[0;31m _\033[0m
\033[0;34m / ____| \033[0;32m|__ __| \033[0;31m| |\033[0m
\033[0;34m | (___ __ _ _ __ __ _ ___ _ __ \033[0m ___ \033[0;32m| |\033[0;33m ___ \033[0;31m| |\033[0m
\033[0;34m \\___ \\ / _` | '_ \\ / _` |/ _ \\ '__|\033[0m|___|\033[0;32m| |\033[0;33m/ _ \\\033[0;31m| |\033[0m
\033[0;34m ____) | (_| | | | | (_| | __/ | \033[0;32m| |\033[0;33m (_) \033[0;31m| |____\033[0m
\033[0;34m |_____/ \\__,_|_| |_|\\__, |\\___|_| \033[0;32m|_|\033[0;33m\\___/\033[0;31m|______|\033[0m
\033[0;34m __/ |\033[0m
\033[0;34m |___/\033[0m
\033[0;35m ${manifest.name} ${manifest.version}\033[0m
-\033[2m----------------------------------------------------\033[0m-
"""
afterText = """${manifest.doi ? "\n* The pipeline\n" : ""}${manifest.doi.tokenize(",").collect { " https://doi.org/${it.trim().replace('https://doi.org/','')}"}.join("\n")}${manifest.doi ? "\n" : ""}
* The nf-core framework
https://doi.org/10.1038/s41587-020-0439-x
* Software dependencies
https://github.com/sanger-tol/curationpretext/blob/main/CITATIONS.md
"""
// beforeText = """
// -\033[2m----------------------------------------------------\033[0m-
// \033[0;34m _____ \033[0;32m _______ \033[0;31m _\033[0m
// \033[0;34m / ____| \033[0;32m|__ __| \033[0;31m| |\033[0m
// \033[0;34m | (___ __ _ _ __ __ _ ___ _ __ \033[0m ___ \033[0;32m| |\033[0;33m ___ \033[0;31m| |\033[0m
// \033[0;34m \\___ \\ / _` | '_ \\ / _` |/ _ \\ '__|\033[0m|___|\033[0;32m| |\033[0;33m/ _ \\\033[0;31m| |\033[0m
// \033[0;34m ____) | (_| | | | | (_| | __/ | \033[0;32m| |\033[0;33m (_) \033[0;31m| |____\033[0m
// \033[0;34m |_____/ \\__,_|_| |_|\\__, |\\___|_| \033[0;32m|_|\033[0;33m\\___/\033[0;31m|______|\033[0m
// \033[0;34m __/ |\033[0m
// \033[0;34m |___/\033[0m
// \033[0;35m ${manifest.name} ${manifest.version}\033[0m
// -\033[2m----------------------------------------------------\033[0m-
// """
// afterText = """${manifest.doi ? "\n* The pipeline\n" : ""}${manifest.doi.tokenize(",").collect { " https://doi.org/${it.trim().replace('https://doi.org/','')}"}.join("\n")}${manifest.doi ? "\n" : ""}
// * The nf-core framework
// https://doi.org/10.1038/s41587-020-0439-x
// * Software dependencies
// https://github.com/sanger-tol/curationpretext/blob/main/CITATIONS.md
// """
}

summary {
beforeText = validation.help.beforeText
afterText = validation.help.afterText
}
// summary {
// beforeText = validation.help.beforeText
// afterText = validation.help.afterText
// }
}

// Load modules.config for DSL2 module specific options
Expand Down
4 changes: 2 additions & 2 deletions subworkflows/local/accessory_files/main.nf
Original file line number Diff line number Diff line change
Expand Up @@ -19,8 +19,8 @@ workflow ACCESSORY_FILES {


main:
ch_versions = Channel.empty()
ch_empty_file = Channel.fromPath("${baseDir}/assets/EMPTY.txt")
ch_versions = channel.empty()
ch_empty_file = channel.fromPath("${baseDir}/assets/EMPTY.txt")

//
// NOTE: THIS IS DUPLICATED IN THE CURATIONPRETEXT WORKFLOW,
Expand Down
2 changes: 1 addition & 1 deletion subworkflows/local/gap_finder/main.nf
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,7 @@ workflow GAP_FINDER {
reference_tuple // Channel [ val(meta), path(fasta) ]

main:
ch_versions = Channel.empty()
ch_versions = channel.empty()

//
// MODULE: GENERATES A GAP SUMMARY FILE
Expand Down
4 changes: 2 additions & 2 deletions subworkflows/local/generate_maps/main.nf
Original file line number Diff line number Diff line change
Expand Up @@ -20,7 +20,7 @@ workflow GENERATE_MAPS {


main:
ch_versions = Channel.empty()
ch_versions = channel.empty()

//
// MODULE: generate a cram csv file containing the required parameters for CRAM_FILTER_ALIGN_BWAMEM2_FIXMATE_SORT
Expand Down Expand Up @@ -80,7 +80,7 @@ workflow GENERATE_MAPS {
hires_pretext = PRETEXTMAP_HIGHRES.out.pretext
ch_versions = ch_versions.mix( PRETEXTMAP_HIGHRES.out.versions )
} else {
hires_pretext = Channel.empty()
hires_pretext = channel.empty()
}

//
Expand Down
3 changes: 1 addition & 2 deletions subworkflows/local/hic_bwamem2/main.nf
Original file line number Diff line number Diff line change
Expand Up @@ -19,8 +19,7 @@ workflow HIC_BWAMEM2 {
reference_index // Channel: tuple [ val(meta), path( fai ) ]

main:
ch_versions = Channel.empty()
mappedbam_ch = Channel.empty()
ch_versions = channel.empty()

//
// MODULE: Indexing on reference output the folder of indexing files
Expand Down
4 changes: 2 additions & 2 deletions subworkflows/local/hic_minimap2/main.nf
Original file line number Diff line number Diff line change
Expand Up @@ -21,8 +21,8 @@ workflow HIC_MINIMAP2 {
reference_index

main:
ch_versions = Channel.empty()
mappedbam_ch = Channel.empty()
ch_versions = channel.empty()
mappedbam_ch = channel.empty()


//
Expand Down
8 changes: 4 additions & 4 deletions subworkflows/local/longread_coverage/main.nf
Original file line number Diff line number Diff line change
Expand Up @@ -17,12 +17,12 @@ workflow LONGREAD_COVERAGE {

take:
reference_tuple // Channel: [ val(meta), path( reference_file ) ]
reference_index // Channel: [ val(meta), path( reference_indx ) ]
_reference_index // Channel: [ val(meta), path( reference_indx ) ]
dot_genome // Channel: [ val(meta), [ path( datafile ) ] ]
reads_path // Channel: [ val(meta), path( str ) ]

main:
ch_versions = Channel.empty()
ch_versions = channel.empty()

//
// LOGIC: TAKE THE READ FOLDER AS INPUT AND GENERATE THE CHANNEL OF READ FILES
Expand Down Expand Up @@ -107,7 +107,7 @@ workflow LONGREAD_COVERAGE {
//
BEDTOOLS_BAMTOBED.out.bed
.combine( dot_genome )
.multiMap { meta, file, my_genome_meta, my_genome ->
.multiMap { meta, file, _my_genome_meta, my_genome ->
input_tuple : tuple (
[ id : meta.id,
single_end : true ],
Expand Down Expand Up @@ -147,7 +147,7 @@ workflow LONGREAD_COVERAGE {
GNU_SORT.out.sorted
.combine( dot_genome )
.combine( reference_tuple )
.multiMap { meta, file, meta_my_genome, my_genome, ref_meta, ref ->
.multiMap { _meta, file, _meta_my_genome, my_genome, ref_meta, _ref ->
ch_coverage_bed : tuple (
[ id: ref_meta.id,
single_end: true
Expand Down
4 changes: 2 additions & 2 deletions subworkflows/local/repeat_density/main.nf
Original file line number Diff line number Diff line change
Expand Up @@ -24,7 +24,7 @@ workflow REPEAT_DENSITY {
dot_genome

main:
ch_versions = Channel.empty()
ch_versions = channel.empty()


//
Expand Down Expand Up @@ -156,7 +156,7 @@ workflow REPEAT_DENSITY {
//
UCSC_BEDGRAPHTOBIGWIG(
GAWK_REPLACE_DOTS.out.output,
GNU_SORT_B.out.sorted.map { it[1] }
GNU_SORT_B.out.sorted.map { _meta, file -> file }
)
ch_versions = ch_versions.mix( UCSC_BEDGRAPHTOBIGWIG.out.versions )

Expand Down
4 changes: 2 additions & 2 deletions subworkflows/local/telo_extraction/main.nf
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@ workflow TELO_EXTRACTION {
telomere_file //tuple(meta, file)

main:
ch_versions = Channel.empty()
ch_versions = channel.empty()

//
// MODULE: GENERATES A WINDOWS FILE FROM THE ABOVE
Expand All @@ -18,7 +18,7 @@ workflow TELO_EXTRACTION {


def windows_file = FIND_TELOMERE_WINDOWS.out.windows
def safe_windows = windows_file.ifEmpty { Channel.empty() }
def safe_windows = windows_file.ifEmpty { channel.empty() }

//
// MODULE: Extract the telomere data from the FIND_TELOMERE
Expand Down
2 changes: 1 addition & 1 deletion subworkflows/local/telo_finder/main.nf
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,7 @@ workflow TELO_FINDER {
teloseq

main:
ch_versions = Channel.empty()
ch_versions = channel.empty()


//
Expand Down
34 changes: 17 additions & 17 deletions subworkflows/local/utils_nfcore_curationpretext_pipeline/main.nf
Original file line number Diff line number Diff line change
Expand Up @@ -79,13 +79,13 @@ workflow PIPELINE_INITIALISATION {
// Create channel from input file provided through params.input
//

input_fasta = Channel.fromPath(
input_fasta = channel.fromPath(
params.input,
checkIfExists: true,
type: 'file'
)

cram_dir = Channel.fromPath(
cram_dir = channel.fromPath(
params.cram,
checkIfExists: true,
type: 'dir'
Expand Down Expand Up @@ -117,21 +117,21 @@ workflow PIPELINE_INITIALISATION {
)
}

ch_reads = Channel
.fromPath(
params.reads,
checkIfExists: true,
type: 'dir'
)
.map { dir ->
tuple(
[ id: params.sample,
single_end: true,
read_type: params.read_type
],
dir
)
}
channel.fromPath(
params.reads,
checkIfExists: true,
type: 'dir'
)
.map { dir ->
tuple(
[ id: params.sample,
single_end: true,
read_type: params.read_type
],
dir
)
}
.set { ch_reads }

emit:
ch_reference
Expand Down
Loading