from nipype.algorithms.modelgen import SpecifyModel
from nipype.interfaces.utility import IdentityInterface, Function, Split, Merge
from nipype.interfaces.io import SelectFiles, DataSink
-from nipype.interfaces.fsl.maths import ImageMaths, ImageStats, MultiImageMaths
+from nipype.interfaces.fsl.maths import MultiImageMaths
from nipype.interfaces.fsl.preprocess import SUSAN
from nipype.interfaces.fsl.model import (
    Level1Design, FEATModel, L2Model, FLAMEO, FILMGLS, MultipleRegressDesign,
    FSLCommand, Cluster
    )
-from nipype.interfaces.fsl.utils import Merge as FSLMerge
+from nipype.interfaces.fsl.utils import ImageMaths, ImageStats, Merge as FSLMerge

from narps_open.utils.configuration import Configuration
from narps_open.pipelines import Pipeline
from narps_open.data.task import TaskInformation
from narps_open.data.participants import get_group
-from narps_open.core.common import list_intersection, elements_in_string, clean_list
+from narps_open.core.common import (
+    remove_parent_directory, list_intersection, elements_in_string, clean_list
+)
from narps_open.core.interfaces import InterfaceFactory

# Setup FSL
@@ -94,12 +96,12 @@ def get_preprocessing(self):
        # ImageStats - Compute the median value of each time point
        # (only because it's needed by SUSAN)
        median_value = Node(ImageStats(), name = 'median_value')
-        median_value.op_string = '-k %s -p 50'
+        median_value.inputs.op_string = '-k %s -p 50'
        preprocessing.connect(select_files, 'func', median_value, 'in_file')
        preprocessing.connect(select_files, 'mask', median_value, 'mask_file')

        # Merge - Merge the median values with the mean functional images into a coupled list
-        merge_median = Node(Merge(2, axis = 'hstack'), name = 'merge_median')
+        merge_median = Node(Merge(2), name = 'merge_median')
        preprocessing.connect(mean_func, 'out_file', merge_median, 'in1')
        preprocessing.connect(median_value, 'out_stat', merge_median, 'in2')
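A minimal sketch, not part of the commit, of what merge_median now produces: with the axis argument removed, the utility Merge node uses its default stacking and simply collects its numbered inputs into one flat list, the [mean_file, median] pair consumed by the usans helper in the next hunk. The file name and median value below are hypothetical.

from nipype.interfaces.utility import Merge

merge = Merge(2)
merge.inputs.in1 = 'mean_func.nii.gz'    # hypothetical mean functional image
merge.inputs.in2 = 850.0                 # hypothetical median value from ImageStats
print(merge.run().outputs.out)           # ['mean_func.nii.gz', 850.0]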
@@ -111,13 +113,13 @@ def get_preprocessing(self):
        get_brightness_threshold = lambda median : 0.75 * median

        # Define a function to get the usans for SUSAN
-        get_usans = lambda value : [tuple([val[0], 0.75 * val[1]])]
+        get_usans = lambda value : [tuple([value[0], 0.75 * value[1]])]

        preprocessing.connect(mask_func, 'out_file', smooth_func, 'in_file')
        preprocessing.connect(
            median_value, ('out_stat', get_brightness_threshold),
            smooth_func, 'brightness_threshold')
-        preprocessing.connect(merge_median, ('out', getusans), smooth_func, 'usans')
+        preprocessing.connect(merge_median, ('out', get_usans), smooth_func, 'usans')

        # TODO : Mask the smoothed data ?
        """
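A minimal sketch, not part of the commit, of what the corrected helper yields: Nipype's SUSAN interface takes 'usans' as a list of (filename, brightness threshold) tuples, and the tuple form ('out', get_usans) in connect applies the function to merge_median's output on its way to that input. The values below are hypothetical.

# the corrected lambda, applied to the [mean_file, median] list built by merge_median
get_usans = lambda value : [tuple([value[0], 0.75 * value[1]])]

print(get_usans(['mean_func.nii.gz', 850.0]))    # [('mean_func.nii.gz', 637.5)]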
@@ -135,7 +137,7 @@ def get_preprocessing(self):
        # ImageMaths - Scale each time point so that its median value is 10000
        normalize_intensity = Node(ImageMaths(), name = 'normalize_intensity')
        normalize_intensity.inputs.suffix = '_intnorm'
-        preprocessing.connect(smooth_func, 'out_file', normalize_intensity, 'in_file')
+        preprocessing.connect(smooth_func, 'smoothed_file', normalize_intensity, 'in_file')
        preprocessing.connect(
            median_value, ('out_stat', get_intensity_normalization_scale),
            normalize_intensity, 'op_string')
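A minimal sketch, not part of the commit, of why the source name changes here: Nipype's SUSAN interface exposes its result as 'smoothed_file', not 'out_file', and connecting from a name the interface does not define is rejected when the connection is made. Printing the output spec is a quick check.

from nipype.interfaces.fsl.preprocess import SUSAN

print(SUSAN.output_spec().get().keys())    # includes 'smoothed_file'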
@@ -152,12 +154,13 @@ def get_preprocessing(self):
        if Configuration()['pipelines']['remove_unused_data']:

            # Merge Node - Merge func file names to be removed after datasink node is performed
-            merge_removable_files = Node(Merge(4), name = 'merge_removable_files')
+            merge_removable_files = Node(Merge(5), name = 'merge_removable_files')
            merge_removable_files.inputs.ravel_inputs = True
            preprocessing.connect(func_to_float, 'out_file', merge_removable_files, 'in1')
            preprocessing.connect(mask_func, 'out_file', merge_removable_files, 'in2')
            preprocessing.connect(mean_func, 'out_file', merge_removable_files, 'in3')
-            preprocessing.connect(smooth_func, 'out_file', merge_removable_files, 'in4')
+            preprocessing.connect(smooth_func, 'smoothed_file', merge_removable_files, 'in4')
+            preprocessing.connect(normalize_intensity, 'out_file', merge_removable_files, 'in5')

            # Function Nodes remove_files - Remove sizeable func files once they aren't needed
            remove_dirs = MapNode(Function(
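A minimal sketch, not part of the commit, of the merge feeding the clean-up step: the fifth input adds the intensity-normalized file to the list of removable intermediates, and ravel_inputs flattens any nested lists before they reach the removal node. The file names below are hypothetical.

from nipype.interfaces.utility import Merge

merge = Merge(2)
merge.inputs.ravel_inputs = True
merge.inputs.in1 = [['run-01_thresh.nii.gz'], ['run-02_thresh.nii.gz']]    # hypothetical nested lists
merge.inputs.in2 = 'run-01_intnorm.nii.gz'                                 # hypothetical single file
print(merge.run().outputs.out)
# ['run-01_thresh.nii.gz', 'run-02_thresh.nii.gz', 'run-01_intnorm.nii.gz']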
@@ -181,8 +184,8 @@ def get_preprocessing_outputs(self):
        parameter_sets = product(*parameters.values())
        template = join(
            self.directories.output_dir, 'preprocessing',
-            '_subject_id_{subject_id}', '_run_id_{run_id}',
-            'wc2sub-{subject_id}_T1w.nii')
+            '_run_id_{run_id}_subject_id_{subject_id}',
+            'sub-{subject_id}_task-MGT_run-{run_id}_bold_space-MNI152NLin2009cAsym_preproc_dtype_thresh_smooth_intnorm.nii.gz')

        return [template.format(**dict(zip(parameters.keys(), parameter_values)))\
            for parameter_values in parameter_sets]
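A minimal sketch, not part of the commit, of how the corrected template expands into the expected preprocessing outputs, assuming 'parameters' maps run_id and subject_id to the lists the pipeline iterates over; the directory and id values below are hypothetical.

from itertools import product
from os.path import join

parameters = {'run_id': ['01', '02'], 'subject_id': ['001']}    # hypothetical values
parameter_sets = product(*parameters.values())
template = join(
    '/output', 'preprocessing',
    '_run_id_{run_id}_subject_id_{subject_id}',
    'sub-{subject_id}_task-MGT_run-{run_id}_bold_space-MNI152NLin2009cAsym_preproc_dtype_thresh_smooth_intnorm.nii.gz')

for parameter_values in parameter_sets:
    # one expected file path per (run, subject) combination
    print(template.format(**dict(zip(parameters.keys(), parameter_values))))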