4 changes: 4 additions & 0 deletions main.nf
@@ -106,6 +106,7 @@ include {quantification} from "$projectDir/modules/quantification"
include {downstream} from "$projectDir/modules/downstream"
include {viz} from "$projectDir/modules/viz"
include {background} from "$projectDir/modules/background"
include {gator} from "$projectDir/modules/gator"

// Define the primary mcmicro workflow
workflow {
@@ -161,7 +162,10 @@ workflow {

// Spatial feature tables -> cell state calling
sft = quantification.out.mix(pre_qty)

// Downstream analyses
downstream(mcp, sft)
gator(mcp, allimg, segMsk, sft)

// Visualization
viz(mcp, allimg, chMrk)
152 changes: 152 additions & 0 deletions modules/gator.nf
@@ -0,0 +1,152 @@
/*
A template for adding new modules to MCMICRO

Step 1: Add module specs to config/defaults.yml

a: Add a flag specifying whether the module should be run to workflow:
b: Add default module options to options:
c: Add module name and container specs to modules:

For example, suppose we wanted to add a module that produces a QC report
about the signal-to-noise ratio (snr). The three additions to defaults.yml
may then look as follows:

workflow:
report: false
options:
snr: --cool-parameter 42
modules:
report:
name: snr
container: myorganization/snr
version: 1.0.0

Step 2: Modify the code below as needed

Step 3: Run the module from the main workflow in main.nf

a: add an include statement to import the relevant workflow. For example:

...
include {downstream} from "$projectDir/modules/downstream"
include {viz} from "$projectDir/modules/viz"
include {report} from "$projectDir/modules/report" // <- importing the new module

b: add a statement calling the module near the bottom of the main workflow:

...
downstream(mcp, sft)

report(mcp, allimg, sft) // <- calling the new module

// Visualization
viz(mcp, allimg)
...

*/
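/*
For this particular module, the matching additions to config/defaults.yml might
look roughly as follows; the container name and version below are illustrative
placeholders rather than confirmed values:

workflow:
  gator: false
options:
  gator: ''
modules:
  gator:
    name: gator
    container: myorganization/gatorpy
    version: 1.0.0
*/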

// Import utility functions from lib/mcmicro/*.groovy
import mcmicro.*

// Process name will appear in the Nextflow execution log
// While not strictly required, it's a good idea to make the
// process name match your tool name to avoid user confusion
process gatorpy {

// Use the container specification from the parameter file
// No change to this line is required
container "${params.contPfx}${module.container}:${module.version}"

// Specify the project subdirectory for writing the outputs to
// The pattern: specification must match the output: files below
publishDir "${params.in}/gator", mode: 'copy', pattern: "GATOR/*"

// Stores .command.sh and .command.log from the work directory
// to the project provenance
// No change to this line is required
publishDir "${Flow.QC(params.in, 'provenance')}", mode: 'copy',
pattern: '.command.{sh,log}',
saveAs: {fn -> fn.replace('.command', "${module.name}-${task.index}")}

// Inputs for the process
// mcp - MCMICRO parameters (workflow, options, etc.)
// module - module specifications (name, container, options, etc.)
// markers - the marker channel map (markers.csv)
// gatorModel - a pre-trained GATOR model
// img/mask/sft - matched tuples of images, segmentation masks, and spatial feature tables
input:
val mcp
val module
path markers
path gatorModel
tuple path(img), path(mask), path(sft)

// Process outputs that should be captured and
// a) returned as results
// b) published to the project directory
output:
path("GATOR/*"), emit: results

// Provenance files -- no change is needed here
tuple path('.command.sh'), path('.command.log')

// Specifies whether to run the process
// Here, we simply take the corresponding flag from the workflow parameters
when: mcp.workflow["gator"]
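
// The module can then be switched on from a user's parameter file by setting
// the matching flag, e.g. (illustrative params.yml snippet):
//   workflow:
//     gator: true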

// The command to be executed inside the tool container
// The command must write all outputs to the current working directory (.)
// Opts.moduleOpts() will identify and return the appropriate module options
"""
python /app/gatorPipeline.py --projectDir . \
--imagePath $img \
--markerChannelMapPath $markers \
--gatorModelPath $gatorModel \
--segmentationPath $mask \
--spatialTablePath $sft \
--features $sft \
${Opts.moduleOpts(module, mcp)}
"""
}

workflow gator {

take:
mcp // MCMICRO parameters (workflow, options, etc.)
imgs // images
msks // segmentation masks
sfts // spatial feature tables

main:

// Identify marker information and the GATOR model
markers = Channel.fromPath( "${params.in}/markers.csv", checkIfExists: true )
model = Channel.fromPath( "${params.in}/models/gator.pkl" )
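// Note: the pipeline thus expects a pre-trained model at <project>/models/gator.pkl;
// unlike markers.csv, its presence is not verified with checkIfExists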

// Retrieve image tags
id_imgs = imgs.map{ it -> tuple(Util.getImageID(it), it) }

// Retrieve the three mask tags: image, method, compartment
id_msks = msks.map{ id, msk -> x = id.split('-',2); tuple(x[1], x[0], msk)}
.transpose() // Split up the list of masks
.map{ _1, _2, msk -> tuple(_1, _2, Util.getImageID(msk), msk)}

// Retrieve the three sft tags: image, method, compartment
id_sfts = sfts.map{ it ->
x = it.getBaseName().toString().split('--')
y = x[1].split('_')
tuple(x[0], y[0], y[1], it)
}
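// As an illustration, a table named "exemplar-001--unmicst_cell.csv" would be
// tagged (image: exemplar-001, method: unmicst, compartment: cell); the split
// assumes spatial feature tables follow an <image>--<method>_<compartment> naming scheme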

// Match everything up and strip the tags
// Put the inputs in the correct order to match the gatorpy process spec
inputs = id_msks.combine(id_sfts, by: [0,1,2]).combine(id_imgs, by: 0)\
.map{_1, _2, _3, msk, sft, img -> tuple(img, msk, sft)}
gatorpy(mcp, mcp.modules['gator'], markers, model, inputs)

// Return the outputs produced by the tool
emit:
gatorpy.out.results
}