Commit 245575d: [ENH] add --no-signal-scaling option (#298)
Parent: e9d8c2a
8 files changed (+66, -23 lines)

8 files changed

+66
-23
lines changed

docs/workflows.rst

Lines changed: 4 additions & 0 deletions
@@ -26,6 +26,7 @@ Participant Workflow
         output_dir='.',
         preproc_img_list=[''],
         selected_confounds=[''],
+        signal_scaling=0,
         smoothing_kernel=None)
 
 The general workflow for a participant models the beta series
@@ -54,6 +55,7 @@ Least Squares- Separate (LSS)
         hrf_model='glover',
         high_pass=0.008,
         smoothing_kernel=0.0,
+        signal_scaling=0,
         selected_confounds=[''])
 
 nistats is used for modeling using the
@@ -75,6 +77,7 @@ Finite BOLD Response- Separate (FS)
         hrf_model='fir',
         high_pass=0.008,
         smoothing_kernel=0.0,
+        signal_scaling=0,
         selected_confounds=[''])
 
 Additionally, NiBetaSeries can be used to perform
@@ -97,6 +100,7 @@ Least Squares- All (LSA)
         hrf_model='glover',
         high_pass=0.008,
         smoothing_kernel=0.0,
+        signal_scaling=0,
         selected_confounds=[''])
 
 For completeness, NiBetaSeries also implements least squares- all (LSA),
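For users who build the beta-series workflow in Python rather than through the nibs command line, the documented calls above can be adapted to disable scaling. The sketch below is illustrative only: the import path follows the repository layout, the keyword values other than signal_scaling are copied from the LSS documentation example, and the leading estimator keyword is an assumption (it is used inside the function in workflows/model.py, but its place in the signature is not shown in this diff).

# Hedged sketch: requesting unscaled betas when constructing the workflow directly.
# Everything except signal_scaling mirrors the LSS documentation example above;
# the import path and the estimator keyword are assumptions, not part of this commit.
from nibetaseries.workflows.model import init_betaseries_wf

betaseries_wf = init_betaseries_wf(
    estimator='lss',
    fir_delays=None,
    hrf_model='glover',
    high_pass=0.008,
    smoothing_kernel=0.0,
    signal_scaling=False,  # 0 (the default) mean-scales each voxel; False keeps raw BOLD units
    selected_confounds=[''],
)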

src/nibetaseries/cli/run.py

Lines changed: 11 additions & 0 deletions
@@ -102,6 +102,10 @@ def get_parser():
                           'are stored (i.e. non-essential files). '
                           'This directory can be deleted once you are reasonably '
                           'certain nibs finished as expected.')
+    proc_opts.add_argument('--no-signal-scaling', action='store_true', default=False,
+                           help='do not scale every voxel with respect to time meaning'
+                                ' beta estimates will be in the same units as the bold'
+                                ' signal')
 
     # Image Selection options
     bids_opts = parser.add_argument_group('Options for selecting images')
@@ -237,6 +241,12 @@ def main():
     else:
         atlas_img = atlas_lut = None
 
+    # check if --no-signal-scaling is set
+    if opts.no_signal_scaling:
+        signal_scaling = False
+    else:
+        signal_scaling = 0
+
     # running participant level
     if opts.analysis_level == "participant":
         nibetaseries_participant_wf = init_nibetaseries_participant_wf(
@@ -252,6 +262,7 @@ def main():
             high_pass=opts.high_pass,
             output_dir=output_dir,
             run_label=opts.run_label,
+            signal_scaling=signal_scaling,
             selected_confounds=opts.confounds,
             session_label=opts.session_label,
             smoothing_kernel=opts.smoothing_kernel,
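The new branch in main() is the entire behavior of the flag: argparse stores a boolean, and that boolean is translated into the two values the downstream interfaces accept, False (no scaling) or 0 (nistats' default per-voxel mean scaling). A self-contained sketch of that mapping, using a throwaway parser rather than the project's get_parser():

# Standalone illustration of the flag-to-value translation added above (toy parser, not get_parser()).
import argparse

parser = argparse.ArgumentParser()
parser.add_argument('--no-signal-scaling', action='store_true', default=False,
                    help='do not scale every voxel with respect to time')

for argv in ([], ['--no-signal-scaling']):
    opts = parser.parse_args(argv)
    # same branch as main(): False disables scaling, 0 keeps nistats' default mean scaling
    signal_scaling = False if opts.no_signal_scaling else 0
    print(argv, '->', signal_scaling)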

src/nibetaseries/cli/tests/test_run.py

Lines changed: 8 additions & 5 deletions
@@ -36,14 +36,15 @@ def test_conditional_arguments(monkeypatch):
         get_parser().parse_args(no_img)
 
 
-@pytest.mark.parametrize("use_atlas,estimator,fir_delays,hrf_model,part_label",
-                         [(True, 'lsa', None, 'spm', '01'),
-                          (False, 'lss', None, 'spm', 'sub-01'),
-                          (True, 'lss', [0, 1, 2, 3, 4], 'fir', None)])
+@pytest.mark.parametrize("use_atlas,estimator,fir_delays,hrf_model,part_label,use_signal_scaling",
+                         [(True, 'lsa', None, 'spm', '01', True),
+                          (False, 'lss', None, 'spm', 'sub-01', False),
+                          (True, 'lss', [0, 1, 2, 3, 4], 'fir', None, False)])
 def test_nibs(
         bids_dir, deriv_dir, sub_fmriprep, sub_metadata, bold_file, preproc_file,
         sub_events, confounds_file, brainmask_file, atlas_file, atlas_lut,
-        estimator, fir_delays, hrf_model, monkeypatch, part_label, use_atlas):
+        estimator, fir_delays, hrf_model, monkeypatch, part_label, use_atlas,
+        use_signal_scaling):
     import sys
     bids_dir = str(bids_dir)
     out_dir = os.path.join(bids_dir, 'derivatives')
@@ -64,6 +65,8 @@ def test_nibs(
             out_dir,
             "participant",
             "-c", ".*derivative.*"]
+    if use_signal_scaling:
+        args.extend(["--no-signal-scaling"])
     if use_atlas:
         args.extend(["-a", str(atlas_file), "-l", str(atlas_lut)])
     if fir_delays:

src/nibetaseries/interfaces/nistats.py

Lines changed: 8 additions & 2 deletions
@@ -27,6 +27,9 @@ class LSSBetaSeriesInputSpec(BaseInterfaceInputSpec):
                        desc="File that contains all events from the bold run")
     confounds_file = traits.Either(None, File(exists=True),
                                    desc="File that contains all usable confounds")
+    signal_scaling = traits.Enum(False, 0,
+                                 desc="Whether (0) or not (False) to scale each"
+                                      " voxel's timeseries")
     selected_confounds = traits.Either(None, traits.List(),
                                        desc="Column names of the regressors to include")
     hrf_model = traits.String(desc="hemodynamic response model")
@@ -70,7 +73,7 @@ def _run_interface(self, runtime):
             hrf_model=self.inputs.hrf_model,
             mask=self.inputs.mask_file,
             smoothing_fwhm=self.inputs.smoothing_kernel,
-            signal_scaling=0,
+            signal_scaling=self.inputs.signal_scaling,
             high_pass=self.inputs.high_pass,
             drift_model='cosine',
             verbose=1,
@@ -156,6 +159,9 @@ class LSABetaSeriesInputSpec(BaseInterfaceInputSpec):
                        desc="File that contains all events from the bold run")
     confounds_file = traits.Either(None, File(exists=True),
                                    desc="File that contains all usable confounds")
+    signal_scaling = traits.Enum(False, 0,
+                                 desc="Whether (0) or not (False) to scale each"
+                                      " voxel's timeseries")
     selected_confounds = traits.Either(None, traits.List(),
                                        desc="Column names of the regressors to include")
     hrf_model = traits.String(desc="hemodynamic response model")
@@ -196,7 +202,7 @@ def _run_interface(self, runtime):
             hrf_model=self.inputs.hrf_model,
             mask=self.inputs.mask_file,
             smoothing_fwhm=self.inputs.smoothing_kernel,
-            signal_scaling=0,
+            signal_scaling=self.inputs.signal_scaling,
             high_pass=self.inputs.high_pass,
             drift_model='cosine',
             verbose=1,
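The value stored in the new signal_scaling trait is forwarded unchanged to nistats' FirstLevelModel, which decides whether each voxel's timeseries is rescaled before the GLM is fit. The numpy snippet below only illustrates what per-voxel scaling over time means; it is not the nistats implementation, and the exact formula nistats uses may differ.

# Rough numpy illustration (not nistats code) of per-voxel scaling over time.
# With signal_scaling=0 the GLM sees something like `scaled`; with signal_scaling=False
# it sees the raw data, so beta estimates stay in the original BOLD units.
import numpy as np

def mean_scale(bold):
    """Approximate mean scaling: percent change around each voxel's temporal mean."""
    voxel_means = bold.mean(axis=0)
    return 100.0 * (bold / voxel_means - 1.0)

rng = np.random.default_rng(0)
bold = rng.normal(loc=1000.0, scale=10.0, size=(200, 4))  # (timepoints, voxels)

scaled = mean_scale(bold)   # roughly the signal_scaling=0 case
unscaled = bold             # the signal_scaling=False case
print(scaled.std(axis=0), unscaled.std(axis=0))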

src/nibetaseries/interfaces/tests/test_nistats.py

Lines changed: 3 additions & 0 deletions
@@ -46,6 +46,7 @@ def test_lss_beta_series(sub_metadata, preproc_file, sub_events,
         events_file=str(sub_events),
         confounds_file=str(confounds_file),
         selected_confounds=selected_confounds,
+        signal_scaling=0,
         hrf_model=hrf_model,
         smoothing_kernel=None,
         high_pass=0.008)
@@ -98,6 +99,7 @@ def test_fs_beta_series(sub_metadata, preproc_file, sub_events,
         events_file=str(sub_events),
         confounds_file=str(confounds_file),
         selected_confounds=selected_confounds,
+        signal_scaling=0,
         hrf_model=hrf_model,
         fir_delays=fir_delays,
         smoothing_kernel=None,
@@ -149,6 +151,7 @@ def test_lsa_beta_series(sub_metadata, preproc_file, sub_events,
         mask_file=mask_file,
         events_file=str(sub_events),
         confounds_file=str(confounds_file),
+        signal_scaling=0,
         selected_confounds=selected_confounds,
         hrf_model=hrf_model,
         smoothing_kernel=None,

src/nibetaseries/workflows/base.py

Lines changed: 11 additions & 4 deletions
@@ -32,8 +32,8 @@
 def init_nibetaseries_participant_wf(
         estimator, atlas_img, atlas_lut, bids_dir,
         database_path, derivatives_pipeline_dir, exclude_description_label,
-        fir_delays, hrf_model, high_pass,
-        output_dir, run_label, selected_confounds, session_label, smoothing_kernel,
+        fir_delays, hrf_model, high_pass, output_dir, run_label,
+        selected_confounds, session_label, signal_scaling, smoothing_kernel,
         space_label, subject_list, task_label, description_label, work_dir,
         ):
 
@@ -69,6 +69,8 @@ def init_nibetaseries_participant_wf(
         Include bold series containing this run label
     selected_confounds : list
         List of confounds to be included in regression
+    signal_scaling : False or 0
+        Whether (0) or not (False) to scale each voxel's timeseries
     session_label : str or None
         Include bold series containing this session label
     smoothing_kernel : float or None
@@ -173,6 +175,7 @@ def init_nibetaseries_participant_wf(
             output_dir=output_dir,
             preproc_img_list=preproc_img_list,
             selected_confounds=selected_confounds,
+            signal_scaling=signal_scaling,
             smoothing_kernel=smoothing_kernel,
         )
 
@@ -191,8 +194,8 @@
 def init_single_subject_wf(
         estimator, atlas_img, atlas_lut, bold_metadata_list, brainmask_list,
         confound_tsv_list, events_tsv_list, fir_delays, hrf_model, high_pass,
-        name, output_dir,
-        preproc_img_list, selected_confounds, smoothing_kernel
+        name, output_dir, preproc_img_list, selected_confounds,
+        signal_scaling, smoothing_kernel,
         ):
     """
     This workflow completes the generation of the betaseries files
@@ -217,6 +220,7 @@ def init_single_subject_wf(
             output_dir='.',
             preproc_img_list=[''],
             selected_confounds=[''],
+            signal_scaling=0,
             smoothing_kernel=0.0)
 
     Parameters
@@ -249,6 +253,8 @@ def init_single_subject_wf(
         list of preprocessed bold files
     selected_confounds : list or None
         the list of confounds to be included in regression
+    signal_scaling : False or 0
+        Whether (0) or not (False) to scale each voxel's timeseries
     smoothing_kernel : float or None
         the size of the smoothing kernel (full width/half max) applied to the bold file (in mm)
 
@@ -308,6 +314,7 @@ def init_single_subject_wf(
         hrf_model=hrf_model,
         high_pass=high_pass,
         selected_confounds=selected_confounds,
+        signal_scaling=signal_scaling,
         smoothing_kernel=smoothing_kernel)
 
     # initialize the analysis workflow

src/nibetaseries/workflows/model.py

Lines changed: 14 additions & 7 deletions
@@ -23,8 +23,9 @@ def init_betaseries_wf(name="betaseries_wf",
                        fir_delays=None,
                        hrf_model='glover',
                        high_pass=0.0078125,
-                       smoothing_kernel=None,
+                       signal_scaling=0,
                        selected_confounds=None,
+                       smoothing_kernel=None,
                        ):
     """Derives Beta Series Maps
     This workflow derives beta series maps from a bold file.
@@ -54,10 +55,12 @@ def init_betaseries_wf(name="betaseries_wf",
     high_pass : float
         high pass filter to apply to bold (in Hertz).
         Reminder - frequencies _lower_ than this number are kept.
-    smoothing_kernel : float or None
-        The size of the smoothing kernel (full width/half max) applied to the bold file (in mm)
     selected_confounds : list or None
         the list of confounds to be included in regression.
+    signal_scaling : False or 0
+        Whether (0) or not (False) to scale each voxel's timeseries
+    smoothing_kernel : float or None
+        The size of the smoothing kernel (full width/half max) applied to the bold file (in mm)
 
     Inputs
     ------
@@ -89,6 +92,7 @@ def init_betaseries_wf(name="betaseries_wf",
         hrf=hrf_model,
         hpf=high_pass,
         selected_confounds=selected_confounds,
+        signal_scaling=signal_scaling,
        estimator=estimator,
        fir_delays=fir_delays,
     )
@@ -105,13 +109,15 @@
         betaseries_node = pe.Node(LSSBetaSeries(
             fir_delays=fir_delays,
             selected_confounds=selected_confounds,
+            signal_scaling=signal_scaling,
             hrf_model=hrf_model,
             smoothing_kernel=smoothing_kernel,
             high_pass=high_pass),
             name='betaseries_node')
     elif estimator == 'lsa':
         betaseries_node = pe.Node(LSABetaSeries(
             selected_confounds=selected_confounds,
+            signal_scaling=signal_scaling,
             hrf_model=hrf_model,
             smoothing_kernel=smoothing_kernel,
             high_pass=high_pass),
@@ -134,16 +140,17 @@
 
 
 def gen_wf_description(nistats_ver, fwhm, hrf, hpf,
-                       selected_confounds, estimator,
-                       fir_delays=None):
+                       selected_confounds, signal_scaling,
+                       estimator, fir_delays=None):
     from textwrap import dedent
 
     smooth_str = ('smoothed with a Gaussian kernel with a FWHM of {fwhm} mm,'
                   ' '.format(fwhm=fwhm)
                   if fwhm != 0. else '')
+    signal_scale_str = ', and mean-scaled over time.' if signal_scaling == 0 else '.'
 
-    preproc_str = ('Prior to modeling, preprocessed data were {smooth_str}masked,'
-                   'and mean-scaled over time.'.format(smooth_str=smooth_str))
+    preproc_str = ('Prior to modeling, preprocessed data were {smooth_str}masked{signal_scale_str}'
+                   .format(smooth_str=smooth_str, signal_scale_str=signal_scale_str))
 
     beta_series_tmp = dedent("""
     After fitting {n_models} model, the parameter estimate (i.e., beta) map
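The generated methods boilerplate is the one place where the two accepted values are compared directly, and the comparison is an equality test. The snippet below reruns that string logic for both values; note that because False == 0 is True in Python, both inputs currently select the mean-scaled wording, and an identity check such as signal_scaling is not False would be needed for the sentence to differ.

# Reruns the string logic introduced above, verbatim, for both accepted values.
# Since False == 0 evaluates to True, both iterations print the mean-scaled sentence.
for signal_scaling in (0, False):
    smooth_str = ''  # assume no smoothing so only the scaling wording could vary
    signal_scale_str = ', and mean-scaled over time.' if signal_scaling == 0 else '.'
    preproc_str = ('Prior to modeling, preprocessed data were {smooth_str}masked{signal_scale_str}'
                   .format(smooth_str=smooth_str, signal_scale_str=signal_scale_str))
    print(repr(signal_scaling), '->', preproc_str)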

src/nibetaseries/workflows/tests/test_base.py

Lines changed: 7 additions & 5 deletions
@@ -8,14 +8,14 @@
 from ..base import init_nibetaseries_participant_wf, _check_bs_len
 
 
-@pytest.mark.parametrize("estimator,fir_delays,hrf_model",
-                         [('lsa', None, 'spm'),
-                          ('lss', None, 'spm'),
-                          ('lss', [0, 1, 2, 3, 4], 'fir')])
+@pytest.mark.parametrize("estimator,fir_delays,hrf_model,signal_scaling",
+                         [('lsa', None, 'spm', 0),
+                          ('lss', None, 'spm', False),
+                          ('lss', [0, 1, 2, 3, 4], 'fir', False)])
 def test_valid_init_nibetaseries_participant_wf(
         bids_dir, deriv_dir, sub_fmriprep, sub_top_metadata, bold_file, preproc_file,
         sub_events, confounds_file, brainmask_file, atlas_file, atlas_lut, bids_db_file,
-        estimator, fir_delays, hrf_model):
+        estimator, fir_delays, hrf_model, signal_scaling):
 
     output_dir = op.join(str(bids_dir), 'derivatives', 'atlasCorr')
     work_dir = op.join(str(bids_dir), 'derivatives', 'work')
@@ -43,6 +43,7 @@ def test_valid_init_nibetaseries_participant_wf(
         run_label=None,
         selected_confounds=['white_matter', 'csf'],
         session_label=None,
+        signal_scaling=signal_scaling,
         smoothing_kernel=None,
         space_label=None,
         subject_list=["01"],
@@ -87,6 +88,7 @@ def test_filters_init_nibetaseries_participant_wf(
         run_label=run_label,
         selected_confounds=['white_matter', 'csf'],
         session_label=session_label,
+        signal_scaling=False,
         smoothing_kernel=None,
         space_label=space_label,
         subject_list=["01"],
