Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 3 additions & 0 deletions modules/nf-neuro/bundle/bundleparc/environment.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@
# Conda environment definition for the bundle/bundleparc module.
# Intentionally empty: main.nf runs inside the scilus/scilus container,
# so no conda channels or packages are declared here.
channels: []
dependencies: []
name: bundle_bundleparc
63 changes: 63 additions & 0 deletions modules/nf-neuro/bundle/bundleparc/main.nf
Original file line number Diff line number Diff line change
@@ -0,0 +1,63 @@
process BUNDLE_BUNDLEPARC {
    tag "$meta.id"
    label 'process_single'

    container "scilus/scilus:2.2.0"

    input:
    tuple val(meta), path(fodf), path(checkpoint)

    output:
    tuple val(meta), path("*.nii.gz"), emit: labels
    path "versions.yml", emit: versions
    path "*__bundleparc_config.json", emit: config

    when:
    task.ext.when == null || task.ext.when

    script:
    def prefix = task.ext.prefix ?: "${meta.id}"
    // Number of points/segments per bundle; defaults to 10 when unset.
    def nb_pts = task.ext.nb_pts ?: 10

    """
    export ITK_GLOBAL_DEFAULT_NUMBER_OF_THREADS=1
    export OMP_NUM_THREADS=1

    # Guard on image orientation: the command is only run when the fODF
    # strides are exactly "-1 2 3 4"; any other layout aborts the task.
    stride="\$( mrinfo -stride $fodf )"
    if [[ "\$stride" == "-1 2 3 4" ]]; then
        scil_fodf_bundleparc $fodf \
            --out_prefix ${prefix}__ \
            --nb_pts ${nb_pts} \
            --out_folder tmp \
            --checkpoint ${checkpoint} \
            --keep_biggest
        mv tmp/* .
        rm -r tmp
    else
        echo "Invalid stride (\$stride), must be -1 2 3 4" >&2
        exit 1
    fi

    # Record the effective parameters. Use the resolved nb_pts so the
    # default is captured; task.ext.nb_pts would serialize as "null"
    # when the option is not set.
    cat <<-BUNDLEPARC_INFO > ${prefix}__bundleparc_config.json
    {"nb_pts": "${nb_pts}"}
    BUNDLEPARC_INFO

    cat <<-END_VERSIONS > versions.yml
    "${task.process}":
        scilpy: \$(uv pip -q -n list | grep scilpy | tr -s ' ' | cut -d' ' -f2)
    END_VERSIONS
    """

    stub:
    def prefix = task.ext.prefix ?: "${meta.id}"
    """
    scil_fodf_bundleparc -h

    touch ${prefix}__AF_left.nii.gz
    touch ${prefix}__bundleparc_config.json

    cat <<-END_VERSIONS > versions.yml
    "${task.process}":
        scilpy: \$(uv pip -q -n list | grep scilpy | tr -s ' ' | cut -d' ' -f2)
    END_VERSIONS
    """
}
92 changes: 92 additions & 0 deletions modules/nf-neuro/bundle/bundleparc/meta.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,92 @@
name: "bundle_bundleparc"
description: |
BundleParc performs bundle segmentation and parcellation directly from fODFs
for a specified number of points/segments/labels.

Bundle definitions follow TractSeg's, without the whole corpus callosum.
However, it is still represented as 7 subparts whose parcellations
should be coherent with one another.

If you use this subworkflow, please cite:
[1] Théberge, Antoine, Descoteaux, Maxime, Jodoin, P-M.
"BundleParc: off-the-shelf bundle parcellation without tractography."
Submitted to Medical Image Analysis (2025).
keywords:
- BundleParc
- Bundle
- Parcellation
tools:
- "bundleparc":
description: |
BundleParc: off-the-shelf bundle parcellation without tractography.
homepage: "https://github.com/scil-vital/BundleParc"
tool_dev_url: "https://github.com/scil-vital/BundleParc-flow"
identifier: ""
- scilpy:
description: The Sherbrooke Connectivity Imaging Lab (SCIL) Python dMRI
processing toolbox.
homepage: https://github.com/scilus/scilpy.git
identifier: ""

input:
- - meta:
type: map
description: |
Groovy Map containing sample information
e.g. `[ id:'sample1', single_end:false ]`
- fodf:
type: file
description: |
The fODF image in the descoteaux07_legacy basis, order 8 (very
important).
pattern: "*.{nii,nii.gz}"
ontologies: []
- checkpoint:
type: file
description: |
Since BundleParc relies on deep learning, the checkpoint represents
the learned weights of BundleParc's underlying model, which is
essential for its operation. The checkpoint file should be
downloaded automatically when using the subworkflow, otherwise it
can be found at:
https://zenodo.org/records/15579498/files/123_4_5_bundleparc.ckpt
pattern: "123_4_5_bundleparc.ckpt"
ontologies: []
args:
- nb_pts:
type: int
description: |
Number of regions/points to parcellate the bundles into.
default: 10
output:
labels:
- - meta:
type: map
description: |
Groovy Map containing sample information
e.g. `[ id:'sample1', single_end:false ]`
- "*.nii.gz":
type: file
description: |
The output parcellated bundles.
pattern: "*.{nii.gz}"
ontologies: []
config:
- "*__bundleparc_config.json":
type: file
description: Parameters used to obtain the output parcellation files.
pattern: "*__bundleparc_config.json"
ontologies:
- edam: "http://edamontology.org/format_3464" # JSON
versions:
- "versions.yml":
type: file
description: File containing software versions
pattern: "versions.yml"
ontologies:
- edam: "http://edamontology.org/format_3750" # YAML

authors:
- "@AntoineTheb"
maintainers:
- "@levje"
74 changes: 74 additions & 0 deletions modules/nf-neuro/bundle/bundleparc/tests/main.nf.test
Original file line number Diff line number Diff line change
@@ -0,0 +1,74 @@
// TODO nf-core: Once you have added the required tests, please run the following command to build this file:
// nf-core modules test bundle/bundleparc
nextflow_process {

    name "Test Process BUNDLE_BUNDLEPARC"
    script "../main.nf"
    process "BUNDLE_BUNDLEPARC"

    tag "modules"
    tag "modules_nfneuro"
    tag "bundle"
    tag "bundle/bundleparc"

    test("bundleparc - nifti") {

        when {
            process {
                """
                // TODO nf-neuro: point these placeholders at real fODF and
                // checkpoint test data. The process input is a 3-element
                // tuple [ meta, fodf, checkpoint ]; the template's
                // 2-element tuple would fail to bind to the process input.
                input[0] = [
                    [ id:'test', single_end:false ], // meta map
                    file(params.modules_testdata_base_path + 'fodf_descoteaux07_legacy.nii.gz', checkIfExists: true),
                    file(params.modules_testdata_base_path + '123_4_5_bundleparc.ckpt', checkIfExists: true)
                ]
                """
            }
        }

        then {
            assertAll(
                { assert process.success },
                { assert snapshot(process.out).match() }
                //TODO nf-core: Add all required assertions to verify the test output.
                // See https://nf-co.re/docs/contributing/tutorials/nf-test_assertions for more information and examples.
            )
        }

    }

    test("bundleparc - nifti - stub") {

        options "-stub"

        when {
            process {
                """
                // Same 3-element tuple shape as the non-stub test; the stub
                // never reads the files, but the cardinality must match.
                input[0] = [
                    [ id:'test', single_end:false ], // meta map
                    file(params.modules_testdata_base_path + 'fodf_descoteaux07_legacy.nii.gz', checkIfExists: true),
                    file(params.modules_testdata_base_path + '123_4_5_bundleparc.ckpt', checkIfExists: true)
                ]
                """
            }
        }

        then {
            assertAll(
                { assert process.success },
                { assert snapshot(process.out).match() }
                //TODO nf-core: Add all required assertions to verify the test output.
            )
        }

    }

}
72 changes: 72 additions & 0 deletions subworkflows/nf-neuro/bundleparc/main.nf
Original file line number Diff line number Diff line change
@@ -0,0 +1,72 @@
include { BUNDLE_BUNDLEPARC } from '../../../modules/nf-neuro/bundle/bundleparc/main.nf'

// Compute the MD5 digest of a file on disk, returned as a lowercase
// hex string. Used to validate a previously-downloaded checkpoint.
def compute_file_hash(file_path) {
    def target = new File(file_path)
    if (!target.exists()) {
        error "File not found: $file_path"
    }

    // Digest the raw file bytes in one shot.
    def raw = java.security.MessageDigest.getInstance("MD5").digest(target.bytes)

    // Render each byte as two lowercase hex digits.
    return raw.collect { String.format("%02x", it) }.join('')
}

// Download the BundleParc model checkpoint into "$dest/weights/", reusing
// an existing copy when present. Returns the on-disk path of the checkpoint.
def fetch_bundleparc_checkpoint(dest) {
    def checkpoint_url = "https://zenodo.org/records/15579498/files/123_4_5_bundleparc.ckpt"
    // TODO: fill in the published MD5 of the checkpoint. While it is empty,
    // integrity validation is skipped — the original code compared against
    // the empty string, which can never match and would have deleted and
    // re-downloaded a perfectly valid file.
    def checkpoint_md5 = ""

    // Use the caller-provided destination consistently (the original mixed
    // $workflow.workDir and $dest, which only coincided by accident).
    def checkpoint_path = "$dest/weights/123_4_5_bundleparc.ckpt"
    def checkpoint_file = new File(checkpoint_path)

    if (checkpoint_file.exists()) {
        if (!checkpoint_md5) {
            println "BundleParc checkpoint already exists; no reference MD5 configured, skipping validation."
            return checkpoint_path
        }
        def existing_md5 = compute_file_hash(checkpoint_path)
        if (existing_md5 == checkpoint_md5) {
            println "BundleParc checkpoint already exists and is valid."
            return checkpoint_path
        }
        println "Existing BundleParc checkpoint is invalid. Re-downloading..."
        checkpoint_file.delete()
    }

    // Make sure the destination directory exists before streaming into it.
    def weights_dir = java.nio.file.Paths.get("$dest/weights/")
    if (!java.nio.file.Files.exists(weights_dir)) {
        java.nio.file.Files.createDirectories(weights_dir)
    }

    println("Downloading BundleParc checkpoint from $checkpoint_url...")
    checkpoint_file.withOutputStream { out ->
        new URL(checkpoint_url).withInputStream { from -> out << from }
    }
    println("Download completed.")

    // Return the path string; the original returned the result of
    // withOutputStream, which is not a usable file reference.
    return checkpoint_path
}

// Subworkflow: ensure a BundleParc model checkpoint is available (either
// user-supplied via params.checkpoint or auto-downloaded into the work
// directory), then run BUNDLE_BUNDLEPARC on every fODF in the channel.
workflow BUNDLEPARC {

    take:
        ch_fodf // channel: [ val(meta), [ fodf ] ]

    main:
        ch_versions = Channel.empty()
        // NOTE(review): nothing is ever mixed into this channel, so the
        // `mqc` emission below is always empty — confirm whether QC
        // mosaics (per the subworkflow meta.yml) were intended here.
        ch_multiqc_files = Channel.empty()

        // A user-supplied checkpoint takes precedence over downloading.
        if ( params.checkpoint ) {
            weights = Channel.fromPath("$params.checkpoint", checkIfExists: true, relative: true)
        }
        else {
            // Download once into workDir/weights; later runs reuse the file.
            if ( !file("$workflow.workDir/weights/123_4_5_bundleparc.ckpt").exists() ) {
                fetch_bundleparc_checkpoint("${workflow.workDir}/")
            }
            weights = Channel.fromPath("$workflow.workDir/weights/123_4_5_bundleparc.ckpt", checkIfExists: true)
        }

        // Pair each [meta, fodf] with the single checkpoint path so the
        // module receives its [ meta, fodf, checkpoint ] tuple.
        ch_fodf = ch_fodf.combine(weights)

        BUNDLE_BUNDLEPARC(ch_fodf)
        ch_versions = ch_versions.mix(BUNDLE_BUNDLEPARC.out.versions)

    emit:
        bundles = BUNDLE_BUNDLEPARC.out.labels // channel: [ val(meta), [ bundles ] ]
        mqc = ch_multiqc_files // channel: [ multiqc files ]
        versions = ch_versions // channel: [ versions.yml ]
}
49 changes: 49 additions & 0 deletions subworkflows/nf-neuro/bundleparc/meta.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,49 @@
name: "bundleparc"
description: |
BundleParc performs bundle segmentation and parcellation directly from fODFs
for a specified number of points/segments/labels.

Bundle definitions follow TractSeg's, without the whole corpus callosum.
However, it is still represented as 7 subparts whose parcellations
should be coherent with one another.

If you use this subworkflow, please cite:
[1] Théberge, Antoine, Descoteaux, Maxime, Jodoin, P-M.
"BundleParc: off-the-shelf bundle parcellation without tractography."
Submitted to Medical Image Analysis (2025).
keywords:
- BundleParc
- Bundle
- Parcellation
components:
- bundle/bundleparc
input:
- ch_fodf:
type: file
description: |
The input channel containing the fODF image in descoteaux07_legacy
format (very important).
pattern: "*.{nii,nii.gz}"
output:
- bundles:
type: file
description: |
Channel containing all the parcellated bundle files.
Structure: [ val(meta), path(bundles) ]
pattern: "*.nii.gz"
- mqc:
type: file
description: |
Channel containing mosaics of a subset of bundles for QC purposes.
Structure: [ val(meta), path(mqc) ]
pattern: "*mqc.png"
- versions:
type: file
description: |
File containing software versions
Structure: [ path(versions.yml) ]
pattern: "versions.yml"
authors:
- "@AntoineTheb"
maintainers:
- "@levje"
Loading
Loading