Skip to content

Commit 2f028fd

Browse files
committed
New version of T54A
1 parent d849cc5 commit 2f028fd

File tree

3 files changed

+56
-35
lines changed

3 files changed

+56
-35
lines changed

narps_open/data/task.py

Lines changed: 5 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -17,11 +17,12 @@ def __init__(self, task_file=''):
1717

1818
# Load information from the task-MGT_bold.json file
1919
if task_file:
20-
task_information_file = task_file # For testing purpose only
21-
else:
22-
task_information_file = join(
20+
self.task_information_file = task_file # For testing purpose only
21+
else:
22+
self.task_information_file = join(
2323
Configuration()['directories']['dataset'], 'task-MGT_bold.json')
24-
with open(task_information_file, 'rb') as file:
24+
25+
with open(self.task_information_file, 'rb') as file:
2526
self.update(load(file))
2627

2728
# Compute derived information

narps_open/pipelines/team_T54A.py

Lines changed: 41 additions & 27 deletions
Original file line numberDiff line numberDiff line change
@@ -9,7 +9,7 @@
99
from nipype import Workflow, Node, MapNode
1010
from nipype.algorithms.modelgen import SpecifyModel
1111
from nipype.interfaces.utility import IdentityInterface, Function, Split, Merge
12-
from nipype.interfaces.io import SelectFiles, DataSink
12+
from nipype.interfaces.io import SelectFiles, DataSink, DataGrabber
1313
from nipype.interfaces.fsl.aroma import ICA_AROMA
1414
from nipype.interfaces.fsl.preprocess import SUSAN, BET
1515
from nipype.interfaces.fsl.model import (
@@ -180,22 +180,25 @@ def get_preprocessing(self):
180180
mean_func_2.inputs.op_string = '-Tmean'
181181
preprocessing.connect(normalize_intensity, 'out_file', mean_func_2, 'in_file')
182182

183-
# ImageMaths - Perform temporal highpass filtering on the data
184-
def get_high_pass_filter_command(in_file):
183+
# Function get_high_pass_filter_command - Build command line for temporal highpass filter
184+
def get_high_pass_filter_command(in_file, repetition_time, high_pass_filter_cutoff):
185185
""" Create command line for high pass filtering using image maths """
186-
from narps_open import TaskInformation
187-
188-
high_pass_filter_cutoff = 100 #seconds
189-
repetition_time = float(TaskInformation()['RepetitionTime'])
190-
191186
return f'-bptf {high_pass_filter_cutoff / (2.0 * repetition_time)} -1 -add {in_file}'
192187

188+
high_pass_command = Node(Function(
189+
function = get_high_pass_filter_command,
190+
input_names = ['in_file', 'repetition_time', 'high_pass_filter_cutoff'],
191+
output_names = ['command']
192+
), name = 'high_pass_command')
193+
high_pass_command.inputs.high_pass_filter_cutoff = 100.0 #seconds
194+
high_pass_command.inputs.repetition_time = TaskInformation()['RepetitionTime']
195+
preprocessing.connect(mean_func_2, 'out_file', high_pass_command, 'in_file')
196+
197+
# ImageMaths - Perform temporal highpass filtering on the data
193198
high_pass_filter = Node(ImageMaths(), name = 'high_pass_filter')
194199
high_pass_filter.inputs.suffix = '_tempfilt'
195200
preprocessing.connect(normalize_intensity, 'out_file', high_pass_filter, 'in_file')
196-
preprocessing.connect(
197-
mean_func_2, ('out_file', get_high_pass_filter_command),
198-
high_pass_filter, 'op_string')
201+
preprocessing.connect(high_pass_command, 'command', high_pass_filter, 'op_string')
199202

200203
# DataSink Node - store the wanted results in the wanted repository
201204
data_sink = Node(DataSink(), name = 'data_sink')
@@ -240,7 +243,7 @@ def get_preprocessing_outputs(self):
240243
template = join(
241244
self.directories.output_dir, 'preprocessing',
242245
'_run_id_{run_id}_subject_id_{subject_id}',
243-
'sub-{subject_id}_task-MGT_run-{run_id}_bold_space-MNI152NLin2009cAsym_preproc_dtype_thresh_smooth_intnorm.nii.gz')
246+
'denoised_func_data_nonaggr_brain_smooth_intnorm_tempfilt.nii.gz')
244247

245248
return [template.format(**dict(zip(parameters.keys(), parameter_values)))\
246249
for parameter_values in parameter_sets]
@@ -329,7 +332,7 @@ def get_parameters_file(in_file):
329332
from pandas import read_csv, DataFrame
330333
from numpy import array, transpose
331334

332-
data_frame = read_csv(filepath, sep = '\t', header=0)
335+
data_frame = read_csv(in_file, sep = '\t', header=0)
333336
if 'NonSteadyStateOutlier00' in data_frame.columns:
334337
temp_list = array([
335338
data_frame['X'], data_frame['Y'], data_frame['Z'],
@@ -379,7 +382,7 @@ def get_run_level_analysis(self):
379382
# Preprocessed functional MRI
380383
'func' : join(self.directories.output_dir, 'preprocessing',
381384
'_run_id_{run_id}_subject_id_{subject_id}',
382-
'sub-{subject_id}_task-MGT_run-{run_id}_bold_space-MNI152NLin2009cAsym_preproc_dtype_thresh_smooth_intnorm.nii.gz'
385+
'denoised_func_data_nonaggr_brain_smooth_intnorm_tempfilt.nii.gz'
383386
),
384387
# Event file
385388
'events' : join('sub-{subject_id}', 'func',
@@ -429,14 +432,14 @@ def get_run_level_analysis(self):
429432
# FEATModel Node - Generate run level model
430433
model_generation = Node(FEATModel(), name = 'model_generation')
431434
run_level_analysis.connect(model_design, 'ev_files', model_generation, 'ev_files')
432-
run_level_analysis.connect(model_design, 'fsf_files', model_generation, 'fsf_files')
435+
run_level_analysis.connect(model_design, 'fsf_files', model_generation, 'fsf_file')
433436

434437
# FILMGLS Node - Estimate first level model
435438
model_estimate = Node(FILMGLS(), name = 'model_estimate')
436439
model_estimate.inputs.smooth_autocorr = True
437440
model_estimate.inputs.mask_size = 5
438441
model_estimate.inputs.threshold = 1000
439-
run_level_analysis.connect(smoothing_func, 'out_file', model_estimate, 'in_file')
442+
run_level_analysis.connect(select_files, 'func', model_estimate, 'in_file')
440443
run_level_analysis.connect(model_generation, 'con_file', model_estimate, 'tcon_file')
441444
run_level_analysis.connect(model_generation, 'design_file', model_estimate, 'design_file')
442445

@@ -500,6 +503,12 @@ def get_subject_level_analysis(self):
500503
Returns:
501504
- subject_level_analysis : nipype.WorkFlow
502505
"""
506+
# Create the subject level analysis workflow and connect its nodes
507+
subject_level = Workflow(
508+
base_dir = self.directories.working_dir,
509+
name = 'subject_level_analysis'
510+
)
511+
503512
# Infosource Node - To iterate on subject and runs
504513
information_source = Node(IdentityInterface(
505514
fields = ['subject_id', 'contrast_id']),
@@ -612,29 +621,34 @@ def get_subject_level_analysis(self):
612621
subject_level.connect(
613622
estimate_model, 'var_copes', data_sink, 'subject_level_analysis.@varcopes')
614623

615-
return subject_level_analysis
624+
return subject_level
616625

617626
def get_subject_level_outputs(self):
618627
""" Return the names of the files the subject level analysis is supposed to generate. """
619628

629+
# Copes, varcopes, stats
620630
parameters = {
621631
'contrast_id' : self.contrast_list,
622-
'subject_id' : self.subject_list,
632+
'subject_ev' : range(1, 1+len(self.subject_list))
623633
}
624634
parameter_sets = product(*parameters.values())
625-
output_dir = join(self.directories.output_dir, 'subject_level_analysis',
626-
'_contrast_id_{contrast_id}_subject_id_{subject_id}')
635+
output_dir = join(self.directories.output_dir, 'subject_level_analysis')
627636
templates = [
628-
join(output_dir, 'cope1.nii.gz'),
629-
join(output_dir, 'tstat1.nii.gz'),
630-
join(output_dir, 'varcope1.nii.gz'),
631-
join(output_dir, 'zstat1.nii.gz'),
632-
join(output_dir, 'sub-{subject_id}_task-MGT_run-01_bold_space-MNI152NLin2009cAsym_preproc_brain_mask_maths.nii.gz')
637+
join(output_dir, '_contrast_id_{contrast_id}', 'cope{subject_ev}.nii.gz'),
638+
join(output_dir, '_contrast_id_{contrast_id}', 'tstat{subject_ev}.nii.gz'),
639+
join(output_dir, '_contrast_id_{contrast_id}', 'varcope{subject_ev}.nii.gz'),
640+
join(output_dir, '_contrast_id_{contrast_id}', 'zstat{subject_ev}.nii.gz')
633641
]
634-
635-
return [template.format(**dict(zip(parameters.keys(), parameter_values)))\
642+
return_list = [template.format(**dict(zip(parameters.keys(), parameter_values)))\
636643
for parameter_values in parameter_sets for template in templates]
637644

645+
# Mask
646+
return_list.append(join(output_dir,
647+
f'sub-{self.subject_list[0]}_task-MGT_run-{self.run_list[0]}_bold_space-MNI152NLin2009cAsym_brainmask_merged_maths.nii.gz'
648+
))
649+
650+
return return_list
651+
638652
def get_one_sample_t_test_regressors(subject_list: list) -> dict:
639653
"""
640654
Create dictionary of regressors for one sample t-test group analysis.

narps_open/utils/singleton.py

Lines changed: 10 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -19,8 +19,14 @@ class SingletonMeta(type):
1919

2020
def __call__(cls, *args, **kwargs):
2121
""" Creating only one instance for the class 'cls' """
22+
23+
# Return the existing instance of class cls if it was already created
24+
if cls in cls._instances:
25+
return cls._instances[cls]
26+
27+
# Otherwise, create the instance using a thread-safe mechanism
2228
with cls._lock:
23-
if cls not in cls._instances:
24-
instance = super().__call__(*args, **kwargs)
25-
cls._instances[cls] = instance
26-
return cls._instances[cls]
29+
instance = super().__call__(*args, **kwargs)
30+
cls._instances[cls] = instance
31+
32+
return cls._instances[cls]

0 commit comments

Comments
 (0)