Skip to content

Commit c203faf

Browse files
authored
Merge branch 'develop' into many_pipes
2 parents 7438a7c + 6a7d31a commit c203faf

File tree

10 files changed

+236
-126
lines changed

10 files changed

+236
-126
lines changed

CHANGELOG.md

Lines changed: 9 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -26,6 +26,9 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
2626
- Optional positional argument "cfg" to `create_lesion_preproc`.
2727
- Allow enabling `overwrite_transform` only when the registration method is `ANTS`.
2828
- `resource_inventory` utility to inventory NodeBlock function inputs and outputs.
29+
- New switch `mask_sbref` under `func_input_prep` in functional registration and set to default `on`.
30+
- New resource `desc-head_bold` as the non-skull-stripped BOLD from nodeblock `bold_masking`.
31+
- `censor_file_path` from `offending_timepoints_connector` in the `build_nuisance_regressor` node.
2932

3033
### Changed
3134

@@ -34,17 +37,20 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
3437
- Made orientation configurable (was hard-coded as "RPI").
3538
- Resource-not-found errors now include information about where to source those resources.
3639
- Moved `ref_mask_res_2` and `T1w_template_res-2` fields from registration into surface under `abcd_prefreesurfer_prep`.
40+
- Moved the `find_censors` node inside `create_nuisance_regression_workflow` into its own function/subworkflow as `offending_timepoints_connector`.
3741
- [FSL-AFNI subworkflow](https://github.com/FCP-INDI/C-PAC/blob/4bdd6c410ef0a9b90f53100ea005af1f7d6e76c0/CPAC/func_preproc/func_preproc.py#L1052C4-L1231C25)
3842
- Moved `FSL-AFNI subworkflow` from inside a `bold_mask_fsl_afni` nodeblock into a separate function.
3943
- Renamed `desc-ref_bold` created in this workflow to `desc-unifized_bold`.
4044
- `coregistration_prep_fmriprep` nodeblock now checks if `desc-unifized_bold` exists in the Resource Pool, if not it runs the `FSL-AFNI subworkflow` to create it.
45+
- Input `desc-brain_bold` to `desc-preproc_bold` for `sbref` generation nodeblock `coregistration_prep_vol`.
4146

4247
### Fixed
4348

4449
- A bug in which AWS S3 encryption was looked for in Nipype config instead of pipeline config (only affected uploading logs).
4550
- Restored `bids-validator` functionality.
4651
- Fixed empty `shell` variable in cluster run scripts.
4752
- A bug in which bandpass filters always assumed 1D regressor files have exactly 5 header rows.
53+
- Removed an erroneous connection to AFNI 3dTProject in nuisance denoising that would unnecessarily send a spike regressor as a censor. This would sometimes cause TRs to unnecessarily be dropped from the timeseries as if scrubbing were being performed.
4854

4955
### Removed
5056

@@ -57,7 +63,9 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
5763
- as output from FNIRT registration.
5864
- as inputs from Nodeblocks requesting it and, replaced with `space-template_desc-brain_mask`.
5965
- from outputs tsv.
60-
66+
- Inputs `[desc-motion_bold, bold]` from `coregistration_prep_vol` nodeblock.
67+
- `input` field from `coregistration` in blank and default config.
68+
- `reg_with_skull` switch from `func_input_prep` in blank and default config.
6169

6270
## [1.8.7] - 2024-05-03
6371

CPAC/func_preproc/func_preproc.py

Lines changed: 7 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1830,6 +1830,10 @@ def bold_mask_ccs(wf, cfg, strat_pool, pipe_num, opt=None):
18301830
"Description": "The skull-stripped BOLD time-series.",
18311831
"SkullStripped": True,
18321832
},
1833+
"desc-head_bold": {
1834+
"Description": "The non skull-stripped BOLD time-series.",
1835+
"SkullStripped": False,
1836+
},
18331837
},
18341838
)
18351839
def bold_masking(wf, cfg, strat_pool, pipe_num, opt=None):
@@ -1841,15 +1845,16 @@ def bold_masking(wf, cfg, strat_pool, pipe_num, opt=None):
18411845
func_edge_detect.inputs.expr = "a*b"
18421846
func_edge_detect.inputs.outputtype = "NIFTI_GZ"
18431847

1844-
node, out = strat_pool.get_data("desc-preproc_bold")
1845-
wf.connect(node, out, func_edge_detect, "in_file_a")
1848+
node_head_bold, out_head_bold = strat_pool.get_data("desc-preproc_bold")
1849+
wf.connect(node_head_bold, out_head_bold, func_edge_detect, "in_file_a")
18461850

18471851
node, out = strat_pool.get_data("space-bold_desc-brain_mask")
18481852
wf.connect(node, out, func_edge_detect, "in_file_b")
18491853

18501854
outputs = {
18511855
"desc-preproc_bold": (func_edge_detect, "out_file"),
18521856
"desc-brain_bold": (func_edge_detect, "out_file"),
1857+
"desc-head_bold": (node_head_bold, out_head_bold),
18531858
}
18541859

18551860
return (wf, outputs)

CPAC/nuisance/nuisance.py

Lines changed: 162 additions & 95 deletions
Original file line numberDiff line numberDiff line change
@@ -496,6 +496,104 @@ def gather_nuisance(
496496
return output_file_path, censor_indices
497497

498498

499+
def offending_timepoints_connector(
500+
nuisance_selectors, name="offending_timepoints_connector"
501+
):
502+
inputspec = pe.Node(
503+
util.IdentityInterface(
504+
fields=[
505+
"fd_j_file_path",
506+
"fd_p_file_path",
507+
"dvars_file_path",
508+
]
509+
),
510+
name="inputspec",
511+
)
512+
513+
wf = pe.Workflow(name=name)
514+
515+
outputspec = pe.Node(
516+
util.IdentityInterface(fields=["out_file"]),
517+
name="outputspec",
518+
)
519+
520+
censor_selector = nuisance_selectors.get("Censor")
521+
522+
find_censors = pe.Node(
523+
Function(
524+
input_names=[
525+
"fd_j_file_path",
526+
"fd_j_threshold",
527+
"fd_p_file_path",
528+
"fd_p_threshold",
529+
"dvars_file_path",
530+
"dvars_threshold",
531+
"number_of_previous_trs_to_censor",
532+
"number_of_subsequent_trs_to_censor",
533+
],
534+
output_names=["out_file"],
535+
function=find_offending_time_points,
536+
as_module=True,
537+
),
538+
name="find_offending_time_points",
539+
)
540+
541+
if not censor_selector.get("thresholds"):
542+
msg = "Censoring requested, but thresh_metric not provided."
543+
raise ValueError(msg)
544+
545+
for threshold in censor_selector["thresholds"]:
546+
if "type" not in threshold or threshold["type"] not in [
547+
"DVARS",
548+
"FD_J",
549+
"FD_P",
550+
]:
551+
msg = "Censoring requested, but with invalid threshold type."
552+
raise ValueError(msg)
553+
554+
if "value" not in threshold:
555+
msg = "Censoring requested, but threshold not provided."
556+
raise ValueError(msg)
557+
558+
if threshold["type"] == "FD_J":
559+
find_censors.inputs.fd_j_threshold = threshold["value"]
560+
wf.connect(inputspec, "fd_j_file_path", find_censors, "fd_j_file_path")
561+
562+
if threshold["type"] == "FD_P":
563+
find_censors.inputs.fd_p_threshold = threshold["value"]
564+
wf.connect(inputspec, "fd_p_file_path", find_censors, "fd_p_file_path")
565+
566+
if threshold["type"] == "DVARS":
567+
find_censors.inputs.dvars_threshold = threshold["value"]
568+
wf.connect(inputspec, "dvars_file_path", find_censors, "dvars_file_path")
569+
570+
if (
571+
censor_selector.get("number_of_previous_trs_to_censor")
572+
and censor_selector["method"] != "SpikeRegression"
573+
):
574+
find_censors.inputs.number_of_previous_trs_to_censor = censor_selector[
575+
"number_of_previous_trs_to_censor"
576+
]
577+
578+
else:
579+
find_censors.inputs.number_of_previous_trs_to_censor = 0
580+
581+
if (
582+
censor_selector.get("number_of_subsequent_trs_to_censor")
583+
and censor_selector["method"] != "SpikeRegression"
584+
):
585+
find_censors.inputs.number_of_subsequent_trs_to_censor = censor_selector[
586+
"number_of_subsequent_trs_to_censor"
587+
]
588+
589+
else:
590+
find_censors.inputs.number_of_subsequent_trs_to_censor = 0
591+
592+
wf.connect(find_censors, "out_file", outputspec, "out_file")
593+
594+
return wf
595+
596+
499597
def create_regressor_workflow(
500598
nuisance_selectors,
501599
use_ants,
@@ -1547,6 +1645,38 @@ def create_regressor_workflow(
15471645
"functional_file_path",
15481646
)
15491647

1648+
if nuisance_selectors.get("Censor"):
1649+
if nuisance_selectors["Censor"]["method"] == "SpikeRegression":
1650+
offending_timepoints_connector_wf = offending_timepoints_connector(
1651+
nuisance_selectors
1652+
)
1653+
nuisance_wf.connect(
1654+
[
1655+
(
1656+
inputspec,
1657+
offending_timepoints_connector_wf,
1658+
[("fd_j_file_path", "inputspec.fd_j_file_path")],
1659+
),
1660+
(
1661+
inputspec,
1662+
offending_timepoints_connector_wf,
1663+
[("fd_p_file_path", "inputspec.fd_p_file_path")],
1664+
),
1665+
(
1666+
inputspec,
1667+
offending_timepoints_connector_wf,
1668+
[("dvars_file_path", "inputspec.dvars_file_path")],
1669+
),
1670+
]
1671+
)
1672+
1673+
nuisance_wf.connect(
1674+
offending_timepoints_connector_wf,
1675+
"outputspec.out_file",
1676+
build_nuisance_regressors,
1677+
"censor_file_path",
1678+
)
1679+
15501680
build_nuisance_regressors.inputs.selector = nuisance_selectors
15511681

15521682
# Check for any regressors to combine into files
@@ -1656,93 +1786,28 @@ def create_nuisance_regression_workflow(nuisance_selectors, name="nuisance_regre
16561786
nuisance_wf = pe.Workflow(name=name)
16571787

16581788
if nuisance_selectors.get("Censor"):
1659-
censor_methods = ["Kill", "Zero", "Interpolate", "SpikeRegression"]
1660-
1661-
censor_selector = nuisance_selectors.get("Censor")
1662-
if censor_selector.get("method") not in censor_methods:
1663-
msg = (
1664-
"Improper censoring method specified ({0}), "
1665-
"should be one of {1}.".format(
1666-
censor_selector.get("method"), censor_methods
1667-
)
1668-
)
1669-
raise ValueError(msg)
1670-
1671-
find_censors = pe.Node(
1672-
Function(
1673-
input_names=[
1674-
"fd_j_file_path",
1675-
"fd_j_threshold",
1676-
"fd_p_file_path",
1677-
"fd_p_threshold",
1678-
"dvars_file_path",
1679-
"dvars_threshold",
1680-
"number_of_previous_trs_to_censor",
1681-
"number_of_subsequent_trs_to_censor",
1682-
],
1683-
output_names=["out_file"],
1684-
function=find_offending_time_points,
1685-
as_module=True,
1686-
),
1687-
name="find_offending_time_points",
1789+
offending_timepoints_connector_wf = offending_timepoints_connector(
1790+
nuisance_selectors
16881791
)
1689-
1690-
if not censor_selector.get("thresholds"):
1691-
msg = "Censoring requested, but thresh_metric not provided."
1692-
raise ValueError(msg)
1693-
1694-
for threshold in censor_selector["thresholds"]:
1695-
if "type" not in threshold or threshold["type"] not in [
1696-
"DVARS",
1697-
"FD_J",
1698-
"FD_P",
1699-
]:
1700-
msg = "Censoring requested, but with invalid threshold type."
1701-
raise ValueError(msg)
1702-
1703-
if "value" not in threshold:
1704-
msg = "Censoring requested, but threshold not provided."
1705-
raise ValueError(msg)
1706-
1707-
if threshold["type"] == "FD_J":
1708-
find_censors.inputs.fd_j_threshold = threshold["value"]
1709-
nuisance_wf.connect(
1710-
inputspec, "fd_j_file_path", find_censors, "fd_j_file_path"
1711-
)
1712-
1713-
if threshold["type"] == "FD_P":
1714-
find_censors.inputs.fd_p_threshold = threshold["value"]
1715-
nuisance_wf.connect(
1716-
inputspec, "fd_p_file_path", find_censors, "fd_p_file_path"
1717-
)
1718-
1719-
if threshold["type"] == "DVARS":
1720-
find_censors.inputs.dvars_threshold = threshold["value"]
1721-
nuisance_wf.connect(
1722-
inputspec, "dvars_file_path", find_censors, "dvars_file_path"
1723-
)
1724-
1725-
if (
1726-
censor_selector.get("number_of_previous_trs_to_censor")
1727-
and censor_selector["method"] != "SpikeRegression"
1728-
):
1729-
find_censors.inputs.number_of_previous_trs_to_censor = censor_selector[
1730-
"number_of_previous_trs_to_censor"
1731-
]
1732-
1733-
else:
1734-
find_censors.inputs.number_of_previous_trs_to_censor = 0
1735-
1736-
if (
1737-
censor_selector.get("number_of_subsequent_trs_to_censor")
1738-
and censor_selector["method"] != "SpikeRegression"
1739-
):
1740-
find_censors.inputs.number_of_subsequent_trs_to_censor = censor_selector[
1741-
"number_of_subsequent_trs_to_censor"
1792+
nuisance_wf.connect(
1793+
[
1794+
(
1795+
inputspec,
1796+
offending_timepoints_connector_wf,
1797+
[("fd_j_file_path", "inputspec.fd_j_file_path")],
1798+
),
1799+
(
1800+
inputspec,
1801+
offending_timepoints_connector_wf,
1802+
[("fd_p_file_path", "inputspec.fd_p_file_path")],
1803+
),
1804+
(
1805+
inputspec,
1806+
offending_timepoints_connector_wf,
1807+
[("dvars_file_path", "inputspec.dvars_file_path")],
1808+
),
17421809
]
1743-
1744-
else:
1745-
find_censors.inputs.number_of_subsequent_trs_to_censor = 0
1810+
)
17461811

17471812
# Use 3dTproject to perform nuisance variable regression
17481813
nuisance_regression = pe.Node(
@@ -1757,17 +1822,19 @@ def create_nuisance_regression_workflow(nuisance_selectors, name="nuisance_regre
17571822
nuisance_regression.inputs.norm = False
17581823

17591824
if nuisance_selectors.get("Censor"):
1760-
if nuisance_selectors["Censor"]["method"] == "SpikeRegression":
1761-
nuisance_wf.connect(find_censors, "out_file", nuisance_regression, "censor")
1762-
else:
1763-
if nuisance_selectors["Censor"]["method"] == "Interpolate":
1764-
nuisance_regression.inputs.cenmode = "NTRP"
1765-
else:
1766-
nuisance_regression.inputs.cenmode = nuisance_selectors["Censor"][
1767-
"method"
1768-
].upper()
1825+
if nuisance_selectors["Censor"]["method"] != "SpikeRegression":
1826+
nuisance_regression.inputs.cenmode = (
1827+
"NTRP"
1828+
if nuisance_selectors["Censor"]["method"] == "Interpolate"
1829+
else nuisance_selectors["Censor"]["method"].upper()
1830+
)
17691831

1770-
nuisance_wf.connect(find_censors, "out_file", nuisance_regression, "censor")
1832+
nuisance_wf.connect(
1833+
offending_timepoints_connector_wf,
1834+
"outputspec.out_file",
1835+
nuisance_regression,
1836+
"censor",
1837+
)
17711838

17721839
if nuisance_selectors.get("PolyOrt"):
17731840
if not nuisance_selectors["PolyOrt"].get("degree"):

CPAC/nuisance/utils/utils.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -139,7 +139,7 @@ def find_offending_time_points(
139139
censor_vector[extended_censors] = 0
140140

141141
out_file_path = os.path.join(os.getcwd(), "censors.tsv")
142-
np.savetxt(out_file_path, censor_vector, fmt="%d", header="censor", comments="")
142+
np.savetxt(out_file_path, censor_vector, fmt="%d", comments="")
143143

144144
return out_file_path
145145

CPAC/pipeline/cpac_pipeline.py

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -148,6 +148,7 @@
148148
coregistration_prep_vol,
149149
create_func_to_T1template_symmetric_xfm,
150150
create_func_to_T1template_xfm,
151+
mask_sbref,
151152
overwrite_transform_anat_to_template,
152153
register_ANTs_anat_to_template,
153154
register_ANTs_EPI_to_template,
@@ -1288,6 +1289,7 @@ def build_workflow(subject_id, sub_dict, cfg, pipeline_name=None):
12881289
coregistration_prep_mean,
12891290
coregistration_prep_fmriprep,
12901291
],
1292+
mask_sbref,
12911293
]
12921294

12931295
# Distortion/Susceptibility Correction

0 commit comments

Comments
 (0)