Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
26 changes: 14 additions & 12 deletions docs/workflows.rst
Original file line number Diff line number Diff line change
Expand Up @@ -942,29 +942,27 @@ Amplitude of low-frequency fluctuation (ALFF) is a measure that ostensibly local
spontaneous neural activity in resting-state BOLD data.
It is calculated by the following:

1. The ``denoised, interpolated BOLD`` is passed along to the ALFF workflow.
2. If censoring+interpolation was performed, then the interpolated time series is censored at this
point.
3. Voxel-wise BOLD time series are normalized (mean-centered and scaled to unit standard deviation)
1. The ``denoised BOLD`` is passed along to the ALFF workflow.
2. Voxel-wise BOLD time series are normalized (mean-centered and scaled to unit standard deviation)
over time. This will ensure that the power spectra from ``periodogram`` and ``lombscargle``
are roughly equivalent.
4. The power spectrum and associated frequencies are estimated from the BOLD data.
3. The power spectrum and associated frequencies are estimated from the BOLD data.

- If censoring+interpolation was not performed, then this uses :func:`scipy.signal.periodogram`.
- If censoring+interpolation was performed, then this uses :func:`scipy.signal.lombscargle`.

5. The square root of the power spectrum is calculated.
6. The power spectrum values corresponding to the frequency range retained by the
4. The square root of the power spectrum is calculated.
5. The power spectrum values corresponding to the frequency range retained by the
temporal filtering step are extracted from the full power spectrum.
7. The mean of the within-band power spectrum is calculated and multiplied by 2.
8. The ALFF value is multiplied by the standard deviation of the voxel-wise
6. The mean of the within-band power spectrum is calculated and multiplied by 2.
7. The ALFF value is multiplied by the standard deviation of the voxel-wise
``denoised BOLD`` time series.
This brings ALFF back to its original scale, as if the time series was not normalized.

ALFF will only be calculated if the bandpass filter is enabled
(i.e., if the ``--disable-bandpass-filter`` flag is not used).

Smoothed ALFF derivatives will also be generated if the ``--smoothing`` flag is used.
ALFF will also be calculated from the smoothed, denoised BOLD if the ``--smoothing`` flag is used.


ReHo
Expand All @@ -976,8 +974,12 @@ Regional Homogeneity (ReHo) is a measure of local temporal uniformity in the BOL
Greater ReHo values correspond to greater synchrony among BOLD activity patterns measured in a local neighborhood of voxels, with neighborhood size determined by a user-specified radius of voxels.
ReHo is calculated as the coefficient of concordance among all voxels in a sphere centered on the target voxel.

For NIfTIs, ReHo is always calculated via AFNI’s 3dReho with 27 voxels in each neighborhood, using Kendall's coefficient of concordance (KCC).
For CIFTIs, the left and right hemisphere are extracted into GIFTI format via Connectome Workbench’s CIFTISeparateMetric. Next, the mesh adjacency matrix is obtained,and Kendall's coefficient of concordance (KCC) is calculated, with each vertex having four neighbors.
For NIfTIs, ReHo is always calculated via AFNI's 3dReHo with 27 voxels in each neighborhood,
using Kendall's coefficient of concordance (KCC).
For CIFTIs, the left and right hemisphere are extracted into GIFTI format via
Connectome Workbench's CIFTISeparateMetric.
Next, the mesh adjacency matrix is obtained, and Kendall's coefficient of concordance (KCC) is calculated,
with each vertex having 5-6 neighbors.
For subcortical voxels in the CIFTIs, 3dReHo is used with the same parameters that are used for NIfTIs.


Expand Down
1 change: 1 addition & 0 deletions xcp_d/tests/test_utils_restingstate.py
Original file line number Diff line number Diff line change
Expand Up @@ -50,6 +50,7 @@ def test_compute_alff(ds001419_data):
# Now try with a sample mask
sample_mask = np.ones(bold_data.shape[1], dtype=bool)
sample_mask[20:30] = False
bold_data = bold_data[:, sample_mask]

alff2 = restingstate.compute_alff_chunk((bold_data, 0.1, 0.01, TR, sample_mask))

Expand Down
10 changes: 10 additions & 0 deletions xcp_d/tests/test_workflows_metrics.py
Original file line number Diff line number Diff line change
Expand Up @@ -52,6 +52,7 @@ def test_nifti_alff(ds001419_data, tmp_path_factory):
alff_wf.base_dir = tempdir
alff_wf.inputs.inputnode.bold_mask = bold_mask
alff_wf.inputs.inputnode.denoised_bold = bold_file
alff_wf.inputs.inputnode.smoothed_bold = bold_file
alff_wf = clean_datasinks(alff_wf)
compute_alff_res = alff_wf.run()

Expand Down Expand Up @@ -94,6 +95,7 @@ def test_nifti_alff(ds001419_data, tmp_path_factory):
alff_wf.base_dir = tempdir
alff_wf.inputs.inputnode.bold_mask = bold_mask
alff_wf.inputs.inputnode.denoised_bold = filename
alff_wf.inputs.inputnode.smoothed_bold = filename
alff_wf = clean_datasinks(alff_wf)
compute_alff_res = alff_wf.run()
nodes = get_nodes(compute_alff_res)
Expand Down Expand Up @@ -140,6 +142,7 @@ def test_cifti_alff(ds001419_data, tmp_path_factory):
alff_wf.base_dir = tempdir
alff_wf.inputs.inputnode.bold_mask = bold_mask
alff_wf.inputs.inputnode.denoised_bold = bold_file
alff_wf.inputs.inputnode.smoothed_bold = bold_file
alff_wf = clean_datasinks(alff_wf)
compute_alff_res = alff_wf.run()

Expand Down Expand Up @@ -169,9 +172,16 @@ def test_cifti_alff(ds001419_data, tmp_path_factory):

# Now let's compute ALFF for the new file and see how it compares
tempdir = tmp_path_factory.mktemp('test_cifti_alff_02')
config.workflow.smoothing = 0
alff_wf = metrics.init_alff_wf(
name_source=bold_file,
TR=TR,
mem_gb=mem_gbx,
)
alff_wf.base_dir = tempdir
alff_wf.inputs.inputnode.bold_mask = bold_mask
alff_wf.inputs.inputnode.denoised_bold = filename
alff_wf = clean_datasinks(alff_wf)
compute_alff_res = alff_wf.run()
nodes = get_nodes(compute_alff_res)

Expand Down
5 changes: 2 additions & 3 deletions xcp_d/utils/doc.py
Original file line number Diff line number Diff line change
Expand Up @@ -113,7 +113,6 @@
smoothing : :obj:`float`
The full width at half maximum (FWHM), in millimeters,
of the Gaussian smoothing kernel that will be applied to the post-processed and denoised data.
ALFF and ReHo outputs will also be smoothing with this kernel.
"""

docdict['custom_confounds_folder'] = """
Expand Down Expand Up @@ -381,8 +380,8 @@
smoothed_denoised_bold : :obj:`str`
Path to the censored, denoised, interpolated, filtered, re-censored, and smoothed BOLD file.
This file is the result of denoising the censored preprocessed BOLD data,
followed by cubic spline interpolation, band-pass filtering, re-censoring, and spatial
smoothing.
followed by cubic spline interpolation, band-pass filtering,
re-censoring (if output_interpolated is False), and spatial smoothing.
"""

docdict['atlases'] = """
Expand Down
13 changes: 11 additions & 2 deletions xcp_d/utils/restingstate.py
Original file line number Diff line number Diff line change
Expand Up @@ -130,7 +130,7 @@ def compute_alff(*, data_matrix, low_pass, high_pass, TR, sample_mask):
Parameters
----------
data_matrix : numpy.ndarray
data matrix points by timepoints
data matrix points by timepoints, after optional censoring
low_pass : float
low pass frequency in Hz
high_pass : float
Expand Down Expand Up @@ -161,6 +161,15 @@ def compute_alff(*, data_matrix, low_pass, high_pass, TR, sample_mask):
fs = 1 / TR # sampling frequency
n_voxels, n_volumes = data_matrix.shape

if sample_mask is not None:
if sample_mask.sum() != n_volumes:
raise ValueError(f'{sample_mask.sum()} != {n_volumes}')

time_arr = np.arange(0, sample_mask.size * TR, TR)
time_arr = time_arr[sample_mask]
if time_arr.size != n_volumes:
raise ValueError(f'{time_arr.size} != {n_volumes}')

alff = np.zeros(n_voxels)
for i_voxel in range(n_voxels):
voxel_data = data_matrix[i_voxel, :]
Expand Down Expand Up @@ -189,7 +198,7 @@ def compute_alff(*, data_matrix, low_pass, high_pass, TR, sample_mask):
angular_frequencies = 2 * np.pi * frequencies_hz
power_spectrum = signal.lombscargle(
time_arr,
voxel_data_censored,
voxel_data,
angular_frequencies,
normalize=True,
)
Expand Down
3 changes: 2 additions & 1 deletion xcp_d/workflows/bold/cifti.py
Original file line number Diff line number Diff line change
Expand Up @@ -292,7 +292,8 @@ def init_postprocess_cifti_wf(
('outputnode.temporal_mask', 'inputnode.temporal_mask'),
]),
(denoise_bold_wf, alff_wf, [
('outputnode.denoised_interpolated_bold', 'inputnode.denoised_bold'),
('outputnode.censored_denoised_bold', 'inputnode.denoised_bold'),
('outputnode.smoothed_denoised_bold', 'inputnode.smoothed_bold'),
]),
]) # fmt:skip

Expand Down
112 changes: 55 additions & 57 deletions xcp_d/workflows/bold/metrics.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,23 +5,18 @@
from nipype.interfaces import utility as niu
from nipype.pipeline import engine as pe
from niworkflows.engine.workflows import LiterateWorkflow as Workflow
from templateflow.api import get as get_template

from xcp_d import config
from xcp_d.config import dismiss_hash
from xcp_d.interfaces.bids import DerivativesDataSink
from xcp_d.interfaces.nilearn import Smooth
from xcp_d.interfaces.plotting import PlotDenseCifti, PlotNifti
from xcp_d.interfaces.restingstate import ComputeALFF, ReHoNamePatch, SurfaceReHo
from xcp_d.interfaces.workbench import (
CiftiCreateDenseFromTemplate,
CiftiSeparateMetric,
CiftiSeparateVolumeAll,
CiftiSmooth,
FixCiftiIntent,
)
from xcp_d.utils.doc import fill_doc
from xcp_d.utils.utils import fwhm2sigma


@fill_doc
Expand Down Expand Up @@ -62,8 +57,9 @@ def init_alff_wf(
Inputs
------
denoised_bold
This is the ``filtered, interpolated, denoised BOLD``,
although interpolation is not necessary if the data were not originally censored.
This is the denoised BOLD after optional re-censoring.
smoothed_bold
Smoothed BOLD after optional re-censoring.
bold_mask
bold mask if bold is nifti
temporal_mask
Expand Down Expand Up @@ -125,6 +121,7 @@ def init_alff_wf(
niu.IdentityInterface(
fields=[
'denoised_bold',
'smoothed_bold',
'bold_mask',
'temporal_mask',
# only used for CIFTI data if the anatomical workflow is enabled
Expand Down Expand Up @@ -195,61 +192,62 @@ def init_alff_wf(
]) # fmt:skip

if smoothing: # If we want to smooth
if file_format == 'nifti':
workflow.__desc__ = workflow.__desc__ + (
' The ALFF maps were smoothed with Nilearn using a Gaussian kernel '
f'(FWHM={str(smoothing)} mm).'
)
# Smooth via Nilearn
smooth_data = pe.Node(
Smooth(fwhm=smoothing),
name='niftismoothing',
)
workflow.connect([
(alff_compt, smooth_data, [('alff', 'in_file')]),
(smooth_data, outputnode, [('out_file', 'smoothed_alff')])
]) # fmt:skip

else: # If cifti
workflow.__desc__ = workflow.__desc__ + (
' The ALFF maps were smoothed with the Connectome Workbench using a Gaussian '
f'kernel (FWHM={str(smoothing)} mm).'
)
workflow.__desc__ += ' ALFF was also calculated from the smoothed, denoised BOLD data.'

# compute alff
compute_smoothed_alff = pe.Node(
ComputeALFF(
TR=TR,
low_pass=low_pass,
high_pass=high_pass,
n_threads=config.nipype.omp_nthreads,
),
mem_gb=mem_gb['bold'],
name='compute_smoothed_alff',
n_procs=config.nipype.omp_nthreads,
)
workflow.connect([
(inputnode, compute_smoothed_alff, [
('smoothed_bold', 'in_file'),
('bold_mask', 'mask'),
('temporal_mask', 'temporal_mask'),
]),
(compute_smoothed_alff, outputnode, [('alff', 'smoothed_alff')])
]) # fmt:skip

# Smooth via Connectome Workbench
sigma_lx = fwhm2sigma(smoothing) # Convert fwhm to standard deviation
# Get templates for each hemisphere
lh_midthickness = str(
get_template('fsLR', hemi='L', suffix='sphere', density='32k', raise_empty=True)[0]
)
rh_midthickness = str(
get_template('fsLR', hemi='R', suffix='sphere', density='32k', raise_empty=True)[0]
)
smooth_data = pe.Node(
CiftiSmooth(
sigma_surf=sigma_lx,
sigma_vol=sigma_lx,
direction='COLUMN',
right_surf=rh_midthickness,
left_surf=lh_midthickness,
num_threads=config.nipype.omp_nthreads,
),
name='ciftismoothing',
mem_gb=mem_gb['bold'],
n_procs=config.nipype.omp_nthreads,
)
# Plot the ALFF map
ds_report_smoothed_alff = pe.Node(
DerivativesDataSink(
source_file=name_source,
),
name='ds_report_smoothed_alff',
run_without_submitting=False,
)

# Always check the intent code in CiftiSmooth's output file
fix_cifti_intent = pe.Node(
FixCiftiIntent(),
name='fix_cifti_intent',
mem_gb=mem_gb['bold'],
if file_format == 'cifti':
plot_smoothed_alff = pe.Node(
PlotDenseCifti(base_desc='alff'),
name='plot_smoothed_alff',
)
workflow.connect([
(alff_compt, smooth_data, [('alff', 'in_file')]),
(smooth_data, fix_cifti_intent, [('out_file', 'in_file')]),
(fix_cifti_intent, outputnode, [('out_file', 'smoothed_alff')]),
(inputnode, plot_smoothed_alff, [
('lh_midthickness', 'lh_underlay'),
('rh_midthickness', 'rh_underlay'),
]),
(plot_smoothed_alff, ds_report_smoothed_alff, [('desc', 'desc')]),
]) # fmt:skip
ds_report_smoothed_alff.inputs.desc = 'alffSmoothedSurfacePlot'
else:
plot_smoothed_alff = pe.Node(
PlotNifti(name_source=name_source),
name='plot_smoothed_alff',
)
ds_report_smoothed_alff.inputs.desc = 'alffSmoothedVolumetricPlot'

workflow.connect([
(compute_smoothed_alff, plot_smoothed_alff, [('alff', 'in_file')]),
(plot_smoothed_alff, ds_report_smoothed_alff, [('out_file', 'in_file')]),
]) # fmt:skip

return workflow

Expand Down
3 changes: 2 additions & 1 deletion xcp_d/workflows/bold/nifti.py
Original file line number Diff line number Diff line change
Expand Up @@ -303,7 +303,8 @@ def init_postprocess_nifti_wf(
('outputnode.temporal_mask', 'inputnode.temporal_mask'),
]),
(denoise_bold_wf, alff_wf, [
('outputnode.denoised_interpolated_bold', 'inputnode.denoised_bold'),
('outputnode.censored_denoised_bold', 'inputnode.denoised_bold'),
('outputnode.smoothed_denoised_bold', 'inputnode.smoothed_bold'),
]),
]) # fmt:skip

Expand Down