From 924ecef66386b61e5bef1c4a712aa16501d62d38 Mon Sep 17 00:00:00 2001 From: Elizabeth Kenneally Date: Tue, 26 Mar 2024 17:15:42 -0400 Subject: [PATCH 01/58] Make command line option for fs ingress --- CPAC/pipeline/engine.py | 113 ++++++++++++++++++++++------------------ CPAC/pipeline/schema.py | 1 + dev/docker_data/run.py | 8 +++ 3 files changed, 70 insertions(+), 52 deletions(-) diff --git a/CPAC/pipeline/engine.py b/CPAC/pipeline/engine.py index 3675c4035d..c5aee5834d 100644 --- a/CPAC/pipeline/engine.py +++ b/CPAC/pipeline/engine.py @@ -1753,61 +1753,70 @@ def ingress_raw_anat_data(wf, rpool, cfg, data_paths, unique_id, part_id, def ingress_freesurfer(wf, rpool, cfg, data_paths, unique_id, part_id, ses_id): - if 'anat' not in data_paths: + if not cfg.pipeline_setup['freesurfer_dir']: print('No FreeSurfer data present.') return rpool + fs_path = os.path.join(cfg.pipeline_setup['freesurfer_dir'], part_id) + if os.path.exists(os.path.join(fs_path, part_id)): + fs_path = os.path.join(fs_path, part_id) + print(fs_path) + if not os.path.exists(fs_path): + if 'sub' in part_id: + fs_path = os.path.join(cfg.pipeline_setup['freesurfer_dir'], part_id.replace('sub-', '')) + else: + fs_path = os.path.join(cfg.pipeline_setup['freesurfer_dir'], 'sub-', part_id) + if not os.path.exists(fs_path): + print(f'No FreeSurfer data found for subject {part_id}') + return rpool + fs_ingress = create_general_datasource('gather_freesurfer_dir') + fs_ingress.inputs.inputnode.set( + unique_id=unique_id, + data=fs_path, + creds_path=data_paths['creds_path'], + dl_dir=cfg.pipeline_setup['working_directory']['path']) + rpool.set_data("freesurfer-subject-dir", fs_ingress, 'outputspec.data', + {}, "", "freesurfer_config_ingress") + + recon_outs = { + 'pipeline-fs_raw-average': 'mri/rawavg.mgz', + 'pipeline-fs_subcortical-seg': 'mri/aseg.mgz', + 'pipeline-fs_brainmask': 'mri/brainmask.mgz', + 'pipeline-fs_wmparc': 'mri/wmparc.mgz', + 'pipeline-fs_T1': 'mri/T1.mgz', + 'pipeline-fs_hemi-L_desc-surface_curv': 'surf/lh.curv', + 'pipeline-fs_hemi-R_desc-surface_curv': 'surf/rh.curv', + 'pipeline-fs_hemi-L_desc-surfaceMesh_pial': 'surf/lh.pial', + 'pipeline-fs_hemi-R_desc-surfaceMesh_pial': 'surf/rh.pial', + 'pipeline-fs_hemi-L_desc-surfaceMesh_smoothwm': 'surf/lh.smoothwm', + 'pipeline-fs_hemi-R_desc-surfaceMesh_smoothwm': 'surf/rh.smoothwm', + 'pipeline-fs_hemi-L_desc-surfaceMesh_sphere': 'surf/lh.sphere', + 'pipeline-fs_hemi-R_desc-surfaceMesh_sphere': 'surf/rh.sphere', + 'pipeline-fs_hemi-L_desc-surfaceMap_sulc': 'surf/lh.sulc', + 'pipeline-fs_hemi-R_desc-surfaceMap_sulc': 'surf/rh.sulc', + 'pipeline-fs_hemi-L_desc-surfaceMap_thickness': 'surf/lh.thickness', + 'pipeline-fs_hemi-R_desc-surfaceMap_thickness': 'surf/rh.thickness', + 'pipeline-fs_hemi-L_desc-surfaceMap_volume': 'surf/lh.volume', + 'pipeline-fs_hemi-R_desc-surfaceMap_volume': 'surf/rh.volume', + 'pipeline-fs_hemi-L_desc-surfaceMesh_white': 'surf/lh.white', + 'pipeline-fs_hemi-R_desc-surfaceMesh_white': 'surf/rh.white', + 'pipeline-fs_xfm': 'mri/transforms/talairach.lta' + } - if 'freesurfer_dir' in data_paths['anat']: - fs_ingress = create_general_datasource('gather_freesurfer_dir') - fs_ingress.inputs.inputnode.set( - unique_id=unique_id, - data=data_paths['anat']['freesurfer_dir'], - creds_path=data_paths['creds_path'], - dl_dir=cfg.pipeline_setup['working_directory']['path']) - rpool.set_data("freesurfer-subject-dir", fs_ingress, 'outputspec.data', - {}, "", "freesurfer_config_ingress") - - recon_outs = { - 'pipeline-fs_raw-average': 'mri/rawavg.mgz', - 
'pipeline-fs_subcortical-seg': 'mri/aseg.mgz', - 'pipeline-fs_brainmask': 'mri/brainmask.mgz', - 'pipeline-fs_wmparc': 'mri/wmparc.mgz', - 'pipeline-fs_T1': 'mri/T1.mgz', - 'pipeline-fs_hemi-L_desc-surface_curv': 'surf/lh.curv', - 'pipeline-fs_hemi-R_desc-surface_curv': 'surf/rh.curv', - 'pipeline-fs_hemi-L_desc-surfaceMesh_pial': 'surf/lh.pial', - 'pipeline-fs_hemi-R_desc-surfaceMesh_pial': 'surf/rh.pial', - 'pipeline-fs_hemi-L_desc-surfaceMesh_smoothwm': 'surf/lh.smoothwm', - 'pipeline-fs_hemi-R_desc-surfaceMesh_smoothwm': 'surf/rh.smoothwm', - 'pipeline-fs_hemi-L_desc-surfaceMesh_sphere': 'surf/lh.sphere', - 'pipeline-fs_hemi-R_desc-surfaceMesh_sphere': 'surf/rh.sphere', - 'pipeline-fs_hemi-L_desc-surfaceMap_sulc': 'surf/lh.sulc', - 'pipeline-fs_hemi-R_desc-surfaceMap_sulc': 'surf/rh.sulc', - 'pipeline-fs_hemi-L_desc-surfaceMap_thickness': 'surf/lh.thickness', - 'pipeline-fs_hemi-R_desc-surfaceMap_thickness': 'surf/rh.thickness', - 'pipeline-fs_hemi-L_desc-surfaceMap_volume': 'surf/lh.volume', - 'pipeline-fs_hemi-R_desc-surfaceMap_volume': 'surf/rh.volume', - 'pipeline-fs_hemi-L_desc-surfaceMesh_white': 'surf/lh.white', - 'pipeline-fs_hemi-R_desc-surfaceMesh_white': 'surf/rh.white', - 'pipeline-fs_xfm': 'mri/transforms/talairach.lta' - } - - for key, outfile in recon_outs.items(): - fullpath = os.path.join(data_paths['anat']['freesurfer_dir'], - outfile) - if os.path.exists(fullpath): - fs_ingress = create_general_datasource(f'gather_fs_{key}_dir') - fs_ingress.inputs.inputnode.set( - unique_id=unique_id, - data=fullpath, - creds_path=data_paths['creds_path'], - dl_dir=cfg.pipeline_setup['working_directory']['path']) - rpool.set_data(key, fs_ingress, 'outputspec.data', - {}, "", f"fs_{key}_ingress") - else: - warnings.warn(str( - LookupError("\n[!] Path does not exist for " - f"{fullpath}.\n"))) + for key, outfile in recon_outs.items(): + fullpath = os.path.join(fs_path, outfile) + if os.path.exists(fullpath): + fs_ingress = create_general_datasource(f'gather_fs_{key}_dir') + fs_ingress.inputs.inputnode.set( + unique_id=unique_id, + data=fullpath, + creds_path=data_paths['creds_path'], + dl_dir=cfg.pipeline_setup['working_directory']['path']) + rpool.set_data(key, fs_ingress, 'outputspec.data', + {}, "", f"fs_{key}_ingress") + else: + warnings.warn(str( + LookupError("\n[!] 
Path does not exist for " + f"{fullpath}.\n"))) return rpool diff --git a/CPAC/pipeline/schema.py b/CPAC/pipeline/schema.py index b1ebc7d947..60fd1d9d77 100644 --- a/CPAC/pipeline/schema.py +++ b/CPAC/pipeline/schema.py @@ -425,6 +425,7 @@ def sanitize(filename): 'Debugging': { 'verbose': bool1_1, }, + 'freesurfer_dir': str, 'outdir_ingress': { 'run': bool1_1, 'Template': Maybe(str), diff --git a/dev/docker_data/run.py b/dev/docker_data/run.py index 344f7206c3..acdbbfbb02 100755 --- a/dev/docker_data/run.py +++ b/dev/docker_data/run.py @@ -343,6 +343,11 @@ def run_main(): 'need to bind the port using the Docker ' 'flag "-p".', action='store_true') + + parser.add_argument('--freesurfer_dir', '--freesurfer-dir', + help='Specify path to pre-computed FreeSurfer outputs ' + 'to pull into C-PAC run', + default=False) # get the command line arguments args = parser.parse_args( @@ -641,6 +646,9 @@ def run_main(): .format(c['pipeline_setup']['system_config'][ 'num_participants_at_once'])) + if args.freesurfer_dir: + c['pipeline_setup']['freesurfer_dir'] = args.freesurfer_dir + if not args.data_config_file: print("Input directory: {0}".format(bids_dir)) From d4335139800b282f0e50df0d33511f00a6a10f38 Mon Sep 17 00:00:00 2001 From: Elizabeth Kenneally Date: Mon, 1 Apr 2024 11:18:53 -0400 Subject: [PATCH 02/58] Make filename parsing more generalizable --- CPAC/pipeline/engine.py | 20 ++++++++++++++------ 1 file changed, 14 insertions(+), 6 deletions(-) diff --git a/CPAC/pipeline/engine.py b/CPAC/pipeline/engine.py index c5aee5834d..3c671c56f4 100644 --- a/CPAC/pipeline/engine.py +++ b/CPAC/pipeline/engine.py @@ -1756,18 +1756,26 @@ def ingress_freesurfer(wf, rpool, cfg, data_paths, unique_id, part_id, if not cfg.pipeline_setup['freesurfer_dir']: print('No FreeSurfer data present.') return rpool + fs_path = os.path.join(cfg.pipeline_setup['freesurfer_dir'], part_id) - if os.path.exists(os.path.join(fs_path, part_id)): - fs_path = os.path.join(fs_path, part_id) - print(fs_path) if not os.path.exists(fs_path): if 'sub' in part_id: fs_path = os.path.join(cfg.pipeline_setup['freesurfer_dir'], part_id.replace('sub-', '')) else: - fs_path = os.path.join(cfg.pipeline_setup['freesurfer_dir'], 'sub-', part_id) + fs_path = os.path.join(cfg.pipeline_setup['freesurfer_dir'], ('sub-' + part_id)) + + # patch for flo-specific data if not os.path.exists(fs_path): - print(f'No FreeSurfer data found for subject {part_id}') - return rpool + subj_ses = part_id + '-' + ses_id + fs_path = os.path.join(cfg.pipeline_setup['freesurfer_dir'], subj_ses) + if not os.path.exists(fs_path): + print(f'No FreeSurfer data found for subject {part_id}') + return rpool + + # Check for double nested subj names + if os.path.exists(os.path.join(fs_path, os.path.basename(fs_path))): + fs_path = os.path.join(fs_path, part_id) + fs_ingress = create_general_datasource('gather_freesurfer_dir') fs_ingress.inputs.inputnode.set( unique_id=unique_id, From 20572de56d66664bb718e0919766eabfeb2c6f36 Mon Sep 17 00:00:00 2001 From: Elizabeth Kenneally Date: Tue, 16 Apr 2024 11:18:52 -0400 Subject: [PATCH 03/58] Change reg workflow to produce consistent outputs --- CPAC/registration/registration.py | 72 ++++++++++--------------------- 1 file changed, 23 insertions(+), 49 deletions(-) diff --git a/CPAC/registration/registration.py b/CPAC/registration/registration.py index 8658aae219..9373a1f7a5 100644 --- a/CPAC/registration/registration.py +++ b/CPAC/registration/registration.py @@ -1421,15 +1421,9 @@ def FSL_registration_connector(wf_name, cfg, orig="T1w", 
opt=None, if opt == 'FSL': - if cfg.registration_workflows['anatomical_registration']['registration']['FSL-FNIRT']['ref_resolution'] == \ - cfg.registration_workflows['anatomical_registration']['resolution_for_anat']: - fnirt_reg_anat_mni = create_fsl_fnirt_nonlinear_reg( - f'anat_mni_fnirt_register{symm}' - ) - else: - fnirt_reg_anat_mni = create_fsl_fnirt_nonlinear_reg_nhp( - f'anat_mni_fnirt_register{symm}' - ) + fnirt_reg_anat_mni = create_fsl_fnirt_nonlinear_reg_nhp( + f'anat_mni_fnirt_register{symm}' + ) wf.connect(inputNode, 'input_brain', fnirt_reg_anat_mni, 'inputspec.input_brain') @@ -1454,33 +1448,22 @@ def FSL_registration_connector(wf_name, cfg, orig="T1w", opt=None, wf.connect(inputNode, 'fnirt_config', fnirt_reg_anat_mni, 'inputspec.fnirt_config') - if cfg.registration_workflows['anatomical_registration']['registration']['FSL-FNIRT']['ref_resolution'] == \ - cfg.registration_workflows['anatomical_registration']['resolution_for_anat']: - # NOTE: this is an UPDATE because of the opt block above - added_outputs = { - f'space-{sym}template_desc-preproc_{orig}': ( - fnirt_reg_anat_mni, 'outputspec.output_brain'), - f'from-{orig}_to-{sym}{tmpl}template_mode-image_xfm': ( - fnirt_reg_anat_mni, 'outputspec.nonlinear_xfm') - } - outputs.update(added_outputs) - else: - # NOTE: this is an UPDATE because of the opt block above - added_outputs = { - f'space-{sym}template_desc-preproc_{orig}': ( - fnirt_reg_anat_mni, 'outputspec.output_brain'), - f'space-{sym}template_desc-head_{orig}': ( - fnirt_reg_anat_mni, 'outputspec.output_head'), - f'space-{sym}template_desc-{orig}_mask': ( - fnirt_reg_anat_mni, 'outputspec.output_mask'), - f'space-{sym}template_desc-T1wT2w_biasfield': ( - fnirt_reg_anat_mni, 'outputspec.output_biasfield'), - f'from-{orig}_to-{sym}{tmpl}template_mode-image_xfm': ( - fnirt_reg_anat_mni, 'outputspec.nonlinear_xfm'), - f'from-{orig}_to-{sym}{tmpl}template_mode-image_warp': ( - fnirt_reg_anat_mni, 'outputspec.nonlinear_warp') - } - outputs.update(added_outputs) + # NOTE: this is an UPDATE because of the opt block above + added_outputs = { + f'space-{sym}template_desc-preproc_{orig}': ( + fnirt_reg_anat_mni, 'outputspec.output_brain'), + f'space-{sym}template_desc-head_{orig}': ( + fnirt_reg_anat_mni, 'outputspec.output_head'), + f'space-{sym}template_desc-{orig}_mask': ( + fnirt_reg_anat_mni, 'outputspec.output_mask'), + f'space-{sym}template_desc-T1wT2w_biasfield': ( + fnirt_reg_anat_mni, 'outputspec.output_biasfield'), + f'from-{orig}_to-{sym}{tmpl}template_mode-image_xfm': ( + fnirt_reg_anat_mni, 'outputspec.nonlinear_xfm'), + f'from-{orig}_to-{sym}{tmpl}template_mode-image_warp': ( + fnirt_reg_anat_mni, 'outputspec.nonlinear_warp') + } + outputs.update(added_outputs) return (wf, outputs) @@ -2030,20 +2013,11 @@ def register_FSL_anat_to_template(wf, cfg, strat_pool, pipe_num, opt=None): node, out = connect wf.connect(node, out, fsl, 'inputspec.input_brain') - if cfg.registration_workflows['anatomical_registration']['registration']['FSL-FNIRT']['ref_resolution'] == \ - cfg.registration_workflows['anatomical_registration']['resolution_for_anat']: - - node, out = strat_pool.get_data('T1w-brain-template') - wf.connect(node, out, fsl, 'inputspec.reference_brain') - - node, out = strat_pool.get_data('T1w-template') - wf.connect(node, out, fsl, 'inputspec.reference_head') - else: - node, out = strat_pool.get_data('FNIRT-T1w-brain-template') - wf.connect(node, out, fsl, 'inputspec.reference_brain') + node, out = strat_pool.get_data('T1w-brain-template') + wf.connect(node, 
out, fsl, 'inputspec.reference_brain') - node, out = strat_pool.get_data('FNIRT-T1w-template') - wf.connect(node, out, fsl, 'inputspec.reference_head') + node, out = strat_pool.get_data('T1w-template') + wf.connect(node, out, fsl, 'inputspec.reference_head') node, out = strat_pool.get_data(["desc-preproc_T1w", "space-longitudinal_desc-reorient_T1w"]) From 9f81479a3b233d2fbd8a7fb02aabeac83e83db16 Mon Sep 17 00:00:00 2001 From: Elizabeth Kenneally Date: Wed, 17 Apr 2024 10:35:04 -0400 Subject: [PATCH 04/58] Remove restore-brain option from pipeline config --- CPAC/pipeline/schema.py | 1 - CPAC/registration/registration.py | 10 ++-------- CPAC/resources/configs/pipeline_config_blank.yml | 4 ---- CPAC/resources/configs/pipeline_config_default.yml | 4 ---- 4 files changed, 2 insertions(+), 17 deletions(-) diff --git a/CPAC/pipeline/schema.py b/CPAC/pipeline/schema.py index 60fd1d9d77..9f903f1cad 100644 --- a/CPAC/pipeline/schema.py +++ b/CPAC/pipeline/schema.py @@ -641,7 +641,6 @@ def sanitize(filename): 'functional_registration': { 'coregistration': { 'run': bool1_1, - 'reference': In({'brain', 'restore-brain'}), 'interpolation': In({'trilinear', 'sinc', 'spline'}), 'using': str, 'input': str, diff --git a/CPAC/registration/registration.py b/CPAC/registration/registration.py index 9373a1f7a5..2d0ecaf926 100644 --- a/CPAC/registration/registration.py +++ b/CPAC/registration/registration.py @@ -2913,7 +2913,7 @@ def coregistration_prep_fmriprep(wf, cfg, strat_pool, pipe_num, opt=None): ), ( "desc-preproc_T1w", - "desc-restore-brain_T1w", + ["desc-restore-brain_T1w", "desc-preproc_T1w"], "desc-preproc_T2w", "desc-preproc_T2w", "T2w", @@ -2979,13 +2979,7 @@ def coregistration(wf, cfg, strat_pool, pipe_num, opt=None): node, out = strat_pool.get_data('sbref') wf.connect(node, out, func_to_anat, 'inputspec.func') - if cfg.registration_workflows['functional_registration'][ - 'coregistration']['reference'] == 'brain': - # TODO: use JSON meta-data to confirm - node, out = strat_pool.get_data('desc-preproc_T1w') - elif cfg.registration_workflows['functional_registration'][ - 'coregistration']['reference'] == 'restore-brain': - node, out = strat_pool.get_data('desc-restore-brain_T1w') + node, out = strat_pool.get_data(['desc-restore-brain_T1w', 'desc-preproc_T1w']) wf.connect(node, out, func_to_anat, 'inputspec.anat') if diff_complete: diff --git a/CPAC/resources/configs/pipeline_config_blank.yml b/CPAC/resources/configs/pipeline_config_blank.yml index f4c049b1b5..67e1277c79 100644 --- a/CPAC/resources/configs/pipeline_config_blank.yml +++ b/CPAC/resources/configs/pipeline_config_blank.yml @@ -740,10 +740,6 @@ registration_workflows: # It is not necessary to change this path unless you intend to use non-standard MNI registration. 
bbr_schedule: $FSLDIR/etc/flirtsch/bbr.sch - # reference: 'brain' or 'restore-brain' - # In ABCD-options pipeline, 'restore-brain' is used as coregistration reference - reference: brain - # Choose FSL or ABCD as coregistration method using: FSL diff --git a/CPAC/resources/configs/pipeline_config_default.yml b/CPAC/resources/configs/pipeline_config_default.yml index 71609ce58d..cb0bca639c 100644 --- a/CPAC/resources/configs/pipeline_config_default.yml +++ b/CPAC/resources/configs/pipeline_config_default.yml @@ -759,10 +759,6 @@ registration_workflows: run: On - # reference: 'brain' or 'restore-brain' - # In ABCD-options pipeline, 'restore-brain' is used as coregistration reference - reference: brain - # Choose FSL or ABCD as coregistration method using: FSL From 68e3c41fa21ddc05e023d8d5ab1625622a65936d Mon Sep 17 00:00:00 2001 From: Biraj Shrestha Date: Wed, 17 Apr 2024 13:04:25 -0400 Subject: [PATCH 05/58] Modified warp_timeseries_to_T1template_abcd to create space-template_res-bold_desc-brain_T1w if needed --- CPAC/registration/registration.py | 88 ++++++++++++++++++++++++------- CPAC/surface/surf_preproc.py | 1 - 2 files changed, 70 insertions(+), 19 deletions(-) diff --git a/CPAC/registration/registration.py b/CPAC/registration/registration.py index 8658aae219..9dc45c0eea 100644 --- a/CPAC/registration/registration.py +++ b/CPAC/registration/registration.py @@ -1348,7 +1348,7 @@ def create_wf_calculate_ants_warp( def FSL_registration_connector(wf_name, cfg, orig="T1w", opt=None, symmetric=False, template="T1w"): - + outputs={} wf = pe.Workflow(name=wf_name) inputNode = pe.Node( @@ -3766,8 +3766,14 @@ def warp_timeseries_to_T1template_deriv(wf, cfg, strat_pool, pipe_num, "space-template_res-bold_desc-brain_T1w", "space-template_desc-bold_mask", "T1w-brain-template-funcreg", + 'space-template_desc-preproc_T1w', + 'T1w-template-funcreg', + 'space-template_desc-T1w_mask', + 'desc-preproc_bold', ], outputs={ + "space-template_res-bold_desc-brain_T1w": { + "Template": "T1w-brain-template-funcreg"}, "space-template_desc-preproc_bold": { "Template": "T1w-brain-template-funcreg"}, "space-template_desc-scout_bold": { @@ -3816,8 +3822,38 @@ def warp_timeseries_to_T1template_abcd(wf, cfg, strat_pool, pipe_num, opt=None node, out = strat_pool.get_data('from-T1w_to-template_mode-image_xfm') wf.connect(node, out, convert_func_to_standard_warp, 'warp2') - node, out = strat_pool.get_data('space-template_res-bold_desc-brain_T1w') - wf.connect(node, out, convert_func_to_standard_warp, 'reference') + if strat_pool.check_rpool('space-template_res-bold_desc-brain_T1w'): + node, out = strat_pool.get_data('space-template_res-bold_desc-brain_T1w') + wf.connect(node, out, convert_func_to_standard_warp, 'reference') + else: + anat_brain_to_func_res = pe.Node(interface=fsl.ApplyWarp(), + name=f'resample_anat_brain_in_standard_{pipe_num}') + anat_brain_to_func_res.inputs.interp = 'spline' + anat_brain_to_func_res.inputs.premat = cfg.registration_workflows[ + 'anatomical_registration']['registration']['FSL-FNIRT']['identity_matrix'] + + node, out = strat_pool.get_data('space-template_desc-preproc_T1w') + wf.connect(node, out, anat_brain_to_func_res, 'in_file') + + node, out = strat_pool.get_data('T1w-template-funcreg') + wf.connect(node, out, anat_brain_to_func_res, 'ref_file') + + wf.connect(anat_brain_to_func_res, 'out_file', + convert_func_to_standard_warp, 'reference') + # Create brain masks in this space from the FreeSurfer output (changing resolution) + # applywarp --rel --interp=nn -i 
${FreeSurferBrainMask}.nii.gz -r ${WD}/${T1wImageFile}.${FinalfMRIResolution} --premat=$FSLDIR/etc/flirtsch/ident.mat -o ${WD}/${FreeSurferBrainMaskFile}.${FinalfMRIResolution}.nii.gz + anat_brain_mask_to_func_res = pe.Node(interface=fsl.ApplyWarp(), + name=f'resample_anat_brain_mask_in_standard_{pipe_num}') + + anat_brain_mask_to_func_res.inputs.interp = 'nn' + anat_brain_mask_to_func_res.inputs.premat = cfg.registration_workflows[ + 'anatomical_registration']['registration']['FSL-FNIRT']['identity_matrix'] + + node, out = strat_pool.get_data('space-template_desc-T1w_mask') + wf.connect(node, out, anat_brain_mask_to_func_res, 'in_file') + + wf.connect(anat_brain_to_func_res, 'out_file', + anat_brain_mask_to_func_res, 'ref_file') # TODO add condition: if no gradient distortion # https://github.com/DCAN-Labs/DCAN-HCP/blob/master/fMRIVolume/GenericfMRIVolumeProcessingPipeline.sh#L283-L284 @@ -3876,8 +3912,11 @@ def warp_timeseries_to_T1template_abcd(wf, cfg, strat_pool, pipe_num, opt=None convert_registration_warp.inputs.out_relwarp = True convert_registration_warp.inputs.relwarp = True - node, out = strat_pool.get_data('space-template_res-bold_desc-brain_T1w') - wf.connect(node, out, convert_registration_warp, 'reference') + if strat_pool.check_rpool('space-template_res-bold_desc-brain_T1w'): + node, out = strat_pool.get_data('space-template_res-bold_desc-brain_T1w') + wf.connect(node, out, convert_registration_warp, 'reference') + else: + wf.connect(anat_brain_to_func_res, 'out_file', convert_registration_warp, 'reference') wf.connect(convert_motion_distortion_warp, 'out_file', convert_registration_warp, 'warp1') @@ -3909,9 +3948,11 @@ def warp_timeseries_to_T1template_abcd(wf, cfg, strat_pool, pipe_num, opt=None wf.connect(convert_registration_warp, 'out_file', applywarp_func_to_standard, 'field_file') - node, out = strat_pool.get_data('space-template_res-bold_desc-brain_T1w') - wf.connect(node, out, - applywarp_func_to_standard, 'ref_file') + if strat_pool.check_rpool('space-template_res-bold_desc-brain_T1w'): + node, out = strat_pool.get_data('space-template_res-bold_desc-brain_T1w') + wf.connect(node, out, applywarp_func_to_standard, 'reference') + else: + wf.connect(anat_brain_to_func_res, 'out_file', applywarp_func_to_standard, 'ref_file') # applywarp --rel --interp=nn --in=${WD}/prevols/vol${vnum}_mask.nii.gz --warp=${MotionMatrixFolder}/${MotionMatrixPrefix}${vnum}_all_warp.nii.gz --ref=${WD}/${T1wImageFile}.${FinalfMRIResolution} --out=${WD}/postvols/vol${vnum}_mask.nii.gz applywarp_func_mask_to_standard = pe.MapNode(interface=fsl.ApplyWarp(), @@ -3927,9 +3968,11 @@ def warp_timeseries_to_T1template_abcd(wf, cfg, strat_pool, pipe_num, opt=None wf.connect(convert_registration_warp, 'out_file', applywarp_func_mask_to_standard, 'field_file') - node, out = strat_pool.get_data('space-template_res-bold_desc-brain_T1w') - wf.connect(node, out, - applywarp_func_mask_to_standard, 'ref_file') + if strat_pool.check_rpool('space-template_res-bold_desc-brain_T1w'): + node, out = strat_pool.get_data('space-template_res-bold_desc-brain_T1w') + wf.connect(node, out, applywarp_func_mask_to_standard, 'reference') + else: + wf.connect(anat_brain_to_func_res, 'out_file', applywarp_func_mask_to_standard, 'ref_file') ### Loop ends! 
### @@ -3969,8 +4012,11 @@ def warp_timeseries_to_T1template_abcd(wf, cfg, strat_pool, pipe_num, opt=None convert_dc_warp.inputs.out_relwarp = True convert_dc_warp.inputs.relwarp = True - node, out = strat_pool.get_data('space-template_res-bold_desc-brain_T1w') - wf.connect(node, out, convert_dc_warp, 'reference') + if strat_pool.check_rpool('space-template_res-bold_desc-brain_T1w'): + node, out = strat_pool.get_data('space-template_res-bold_desc-brain_T1w') + wf.connect(node, out, convert_dc_warp, 'reference') + else: + wf.connect(anat_brain_to_func_res, 'out_file', convert_dc_warp, 'reference') wf.connect(multiply_func_roi_by_zero, 'out_file', convert_dc_warp, 'warp1') @@ -3988,8 +4034,11 @@ def warp_timeseries_to_T1template_abcd(wf, cfg, strat_pool, pipe_num, opt=None node, out = strat_pool.get_data('motion-basefile') wf.connect(node, out, applywarp_scout, 'in_file') - node, out = strat_pool.get_data('space-template_res-bold_desc-brain_T1w') - wf.connect(node, out, applywarp_scout, 'ref_file') + if strat_pool.check_rpool('space-template_res-bold_desc-brain_T1w'): + node, out = strat_pool.get_data('space-template_res-bold_desc-brain_T1w') + wf.connect(node, out, applywarp_scout, 'reference') + else: + wf.connect(anat_brain_to_func_res, 'out_file', applywarp_scout, 'ref_file') wf.connect(convert_dc_warp, 'out_file', applywarp_scout, 'field_file') @@ -3998,9 +4047,11 @@ def warp_timeseries_to_T1template_abcd(wf, cfg, strat_pool, pipe_num, opt=None merge_func_mask = pe.Node(util.Merge(2), name=f'merge_func_mask_{pipe_num}') - node, out = strat_pool.get_data('space-template_desc-bold_mask') - wf.connect(node, out, merge_func_mask, 'in1') - + if strat_pool.check_rpool('space-template_desc-bold_mask'): + node, out = strat_pool.get_data('space-template_desc-bold_mask') + wf.connect(node, out, merge_func_mask, 'in1') + else: + wf.connect(anat_brain_mask_to_func_res, 'out_file', merge_func_mask, 'in1') wf.connect(find_min_mask, 'out_file', merge_func_mask, 'in2') extract_func_brain = pe.Node(interface=fsl.MultiImageMaths(), @@ -4029,6 +4080,7 @@ def warp_timeseries_to_T1template_abcd(wf, cfg, strat_pool, pipe_num, opt=None extract_scout_brain, 'operand_files') outputs = { + 'space-template_res-bold_desc-brain_T1w': (anat_brain_to_func_res, 'out_file'), 'space-template_desc-preproc_bold': (extract_func_brain, 'out_file'), 'space-template_desc-scout_bold': (extract_scout_brain, 'out_file'), 'space-template_desc-head_bold': (merge_func_to_standard, 'merged_file') diff --git a/CPAC/surface/surf_preproc.py b/CPAC/surface/surf_preproc.py index 2b0cb64741..806fbe486c 100755 --- a/CPAC/surface/surf_preproc.py +++ b/CPAC/surface/surf_preproc.py @@ -573,7 +573,6 @@ def run_surface(post_freesurfer_folder, "hemi-R_space-native_white", ], ) - def surface_postproc(wf, cfg, strat_pool, pipe_num, opt=None): surf = pe.Node(util.Function(input_names=['post_freesurfer_folder', From 03a503c0370fa9915b9b841b0bd96e7eda3aa95b Mon Sep 17 00:00:00 2001 From: Elizabeth Kenneally Date: Wed, 17 Apr 2024 14:30:42 -0400 Subject: [PATCH 06/58] Remove coregistration -> reference option --- CPAC/resources/configs/pipeline_config_abcd-options.yml | 4 ---- CPAC/resources/configs/pipeline_config_abcd-prep.yml | 4 ---- 2 files changed, 8 deletions(-) diff --git a/CPAC/resources/configs/pipeline_config_abcd-options.yml b/CPAC/resources/configs/pipeline_config_abcd-options.yml index 10ea6bece3..ef3dcaad18 100644 --- a/CPAC/resources/configs/pipeline_config_abcd-options.yml +++ b/CPAC/resources/configs/pipeline_config_abcd-options.yml 
@@ -207,10 +207,6 @@ registration_workflows: # input: ['Mean_Functional', 'Selected_Functional_Volume', 'fmriprep_reference'] input: [Selected_Functional_Volume] - # reference: 'brain' or 'restore-brain' - # In ABCD-options pipeline, 'restore-brain' is used as coregistration reference - reference: restore-brain - # Choose coregistration interpolation interpolation: spline diff --git a/CPAC/resources/configs/pipeline_config_abcd-prep.yml b/CPAC/resources/configs/pipeline_config_abcd-prep.yml index 7aee4e80ad..c34a4138d0 100644 --- a/CPAC/resources/configs/pipeline_config_abcd-prep.yml +++ b/CPAC/resources/configs/pipeline_config_abcd-prep.yml @@ -168,10 +168,6 @@ registration_workflows: # input: ['Mean_Functional', 'Selected_Functional_Volume', 'fmriprep_reference'] input: [Selected_Functional_Volume] - # reference: 'brain' or 'restore-brain' - # In ABCD-options pipeline, 'restore-brain' is used as coregistration reference - reference: restore-brain - # Choose coregistration interpolation interpolation: spline From fa8d481bb32909ba3c4cdd067dda6a2b86d395ed Mon Sep 17 00:00:00 2001 From: Biraj Shrestha Date: Wed, 17 Apr 2024 17:56:09 -0400 Subject: [PATCH 07/58] added bold_mask to the list of nodeblock outputs --- CPAC/registration/registration.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/CPAC/registration/registration.py b/CPAC/registration/registration.py index 9dc45c0eea..e67d7ac3c0 100644 --- a/CPAC/registration/registration.py +++ b/CPAC/registration/registration.py @@ -3774,6 +3774,8 @@ def warp_timeseries_to_T1template_deriv(wf, cfg, strat_pool, pipe_num, outputs={ "space-template_res-bold_desc-brain_T1w": { "Template": "T1w-brain-template-funcreg"}, + "space-template_desc-bold_mask": { + "Template": "T1w-brain-template-funcreg"}, "space-template_desc-preproc_bold": { "Template": "T1w-brain-template-funcreg"}, "space-template_desc-scout_bold": { @@ -3840,6 +3842,7 @@ def warp_timeseries_to_T1template_abcd(wf, cfg, strat_pool, pipe_num, opt=None wf.connect(anat_brain_to_func_res, 'out_file', convert_func_to_standard_warp, 'reference') + # Create brain masks in this space from the FreeSurfer output (changing resolution) # applywarp --rel --interp=nn -i ${FreeSurferBrainMask}.nii.gz -r ${WD}/${T1wImageFile}.${FinalfMRIResolution} --premat=$FSLDIR/etc/flirtsch/ident.mat -o ${WD}/${FreeSurferBrainMaskFile}.${FinalfMRIResolution}.nii.gz anat_brain_mask_to_func_res = pe.Node(interface=fsl.ApplyWarp(), @@ -4081,6 +4084,7 @@ def warp_timeseries_to_T1template_abcd(wf, cfg, strat_pool, pipe_num, opt=None outputs = { 'space-template_res-bold_desc-brain_T1w': (anat_brain_to_func_res, 'out_file'), + 'space-template_desc-bold_mask': (anat_brain_mask_to_func_res, 'out_file'), 'space-template_desc-preproc_bold': (extract_func_brain, 'out_file'), 'space-template_desc-scout_bold': (extract_scout_brain, 'out_file'), 'space-template_desc-head_bold': (merge_func_to_standard, 'merged_file') From d68a7333c0dbec67f8d256c42907a75d4c81b1a9 Mon Sep 17 00:00:00 2001 From: Biraj Shrestha Date: Thu, 18 Apr 2024 11:45:31 -0400 Subject: [PATCH 08/58] Revert "added bold_mask to the list of nodeblock outputs" This reverts commit fa8d481bb32909ba3c4cdd067dda6a2b86d395ed. 
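For context: the line being reverted had registered the resampled anatomical mask as a declared output of the nodeblock. In this engine, a nodeblock's outputs are a plain mapping from resource name to a (node, output_field) pair, declared in the @nodeblock decorator and returned by the function. A rough sketch of the shape involved, using names from the surrounding diffs (illustrative, not the exact C-PAC code):

    outputs = {
        'space-template_desc-preproc_bold': (extract_func_brain, 'out_file'),
        'space-template_desc-scout_bold': (extract_scout_brain, 'out_file'),
        # the entry this commit reverts:
        # 'space-template_desc-bold_mask': (anat_brain_mask_to_func_res,
        #                                   'out_file'),
    }
    return (wf, outputs)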
--- CPAC/registration/registration.py | 4 ---- 1 file changed, 4 deletions(-) diff --git a/CPAC/registration/registration.py b/CPAC/registration/registration.py index e67d7ac3c0..9dc45c0eea 100644 --- a/CPAC/registration/registration.py +++ b/CPAC/registration/registration.py @@ -3774,8 +3774,6 @@ def warp_timeseries_to_T1template_deriv(wf, cfg, strat_pool, pipe_num, outputs={ "space-template_res-bold_desc-brain_T1w": { "Template": "T1w-brain-template-funcreg"}, - "space-template_desc-bold_mask": { - "Template": "T1w-brain-template-funcreg"}, "space-template_desc-preproc_bold": { "Template": "T1w-brain-template-funcreg"}, "space-template_desc-scout_bold": { @@ -3842,7 +3840,6 @@ def warp_timeseries_to_T1template_abcd(wf, cfg, strat_pool, pipe_num, opt=None wf.connect(anat_brain_to_func_res, 'out_file', convert_func_to_standard_warp, 'reference') - # Create brain masks in this space from the FreeSurfer output (changing resolution) # applywarp --rel --interp=nn -i ${FreeSurferBrainMask}.nii.gz -r ${WD}/${T1wImageFile}.${FinalfMRIResolution} --premat=$FSLDIR/etc/flirtsch/ident.mat -o ${WD}/${FreeSurferBrainMaskFile}.${FinalfMRIResolution}.nii.gz anat_brain_mask_to_func_res = pe.Node(interface=fsl.ApplyWarp(), @@ -4084,7 +4081,6 @@ def warp_timeseries_to_T1template_abcd(wf, cfg, strat_pool, pipe_num, opt=None outputs = { 'space-template_res-bold_desc-brain_T1w': (anat_brain_to_func_res, 'out_file'), - 'space-template_desc-bold_mask': (anat_brain_mask_to_func_res, 'out_file'), 'space-template_desc-preproc_bold': (extract_func_brain, 'out_file'), 'space-template_desc-scout_bold': (extract_scout_brain, 'out_file'), 'space-template_desc-head_bold': (merge_func_to_standard, 'merged_file') From 4239a5f14340c2d878b2af643e604d7eac5a547e Mon Sep 17 00:00:00 2001 From: Biraj Shrestha Date: Thu, 18 Apr 2024 11:46:00 -0400 Subject: [PATCH 09/58] Revert "Modified warp_timeseries_to_T1template_abcd to create space-template_res-bold_desc-brain_T1w if needed" This reverts commit 68e3c41fa21ddc05e023d8d5ab1625622a65936d. 
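For context: the reverted commit built the template-space, BOLD-resolution T1w brain on the fly whenever space-template_res-bold_desc-brain_T1w was missing from the resource pool. The core of that fallback is a spline FSL ApplyWarp with an identity premat, mirroring the DCAN-HCP applywarp call quoted in the code comments. A minimal sketch assembled from the surrounding diffs (assumes wf, cfg, and strat_pool are in scope as inside the nodeblock; not a verbatim excerpt):

    import nipype.pipeline.engine as pe
    from nipype.interfaces import fsl

    if not strat_pool.check_rpool('space-template_res-bold_desc-brain_T1w'):
        # resample the template-space T1w brain onto the funcreg template grid
        anat_brain_to_func_res = pe.Node(interface=fsl.ApplyWarp(),
                                         name='resample_anat_brain_in_standard')
        anat_brain_to_func_res.inputs.interp = 'spline'
        anat_brain_to_func_res.inputs.premat = cfg.registration_workflows[
            'anatomical_registration']['registration']['FSL-FNIRT'][
            'identity_matrix']

        node, out = strat_pool.get_data('space-template_desc-preproc_T1w')
        wf.connect(node, out, anat_brain_to_func_res, 'in_file')

        node, out = strat_pool.get_data('T1w-template-funcreg')
        wf.connect(node, out, anat_brain_to_func_res, 'ref_file')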
--- CPAC/registration/registration.py | 88 +++++++------------------------ CPAC/surface/surf_preproc.py | 1 + 2 files changed, 19 insertions(+), 70 deletions(-) diff --git a/CPAC/registration/registration.py b/CPAC/registration/registration.py index 9dc45c0eea..8658aae219 100644 --- a/CPAC/registration/registration.py +++ b/CPAC/registration/registration.py @@ -1348,7 +1348,7 @@ def create_wf_calculate_ants_warp( def FSL_registration_connector(wf_name, cfg, orig="T1w", opt=None, symmetric=False, template="T1w"): - outputs={} + wf = pe.Workflow(name=wf_name) inputNode = pe.Node( @@ -3766,14 +3766,8 @@ def warp_timeseries_to_T1template_deriv(wf, cfg, strat_pool, pipe_num, "space-template_res-bold_desc-brain_T1w", "space-template_desc-bold_mask", "T1w-brain-template-funcreg", - 'space-template_desc-preproc_T1w', - 'T1w-template-funcreg', - 'space-template_desc-T1w_mask', - 'desc-preproc_bold', ], outputs={ - "space-template_res-bold_desc-brain_T1w": { - "Template": "T1w-brain-template-funcreg"}, "space-template_desc-preproc_bold": { "Template": "T1w-brain-template-funcreg"}, "space-template_desc-scout_bold": { @@ -3822,38 +3816,8 @@ def warp_timeseries_to_T1template_abcd(wf, cfg, strat_pool, pipe_num, opt=None node, out = strat_pool.get_data('from-T1w_to-template_mode-image_xfm') wf.connect(node, out, convert_func_to_standard_warp, 'warp2') - if strat_pool.check_rpool('space-template_res-bold_desc-brain_T1w'): - node, out = strat_pool.get_data('space-template_res-bold_desc-brain_T1w') - wf.connect(node, out, convert_func_to_standard_warp, 'reference') - else: - anat_brain_to_func_res = pe.Node(interface=fsl.ApplyWarp(), - name=f'resample_anat_brain_in_standard_{pipe_num}') - anat_brain_to_func_res.inputs.interp = 'spline' - anat_brain_to_func_res.inputs.premat = cfg.registration_workflows[ - 'anatomical_registration']['registration']['FSL-FNIRT']['identity_matrix'] - - node, out = strat_pool.get_data('space-template_desc-preproc_T1w') - wf.connect(node, out, anat_brain_to_func_res, 'in_file') - - node, out = strat_pool.get_data('T1w-template-funcreg') - wf.connect(node, out, anat_brain_to_func_res, 'ref_file') - - wf.connect(anat_brain_to_func_res, 'out_file', - convert_func_to_standard_warp, 'reference') - # Create brain masks in this space from the FreeSurfer output (changing resolution) - # applywarp --rel --interp=nn -i ${FreeSurferBrainMask}.nii.gz -r ${WD}/${T1wImageFile}.${FinalfMRIResolution} --premat=$FSLDIR/etc/flirtsch/ident.mat -o ${WD}/${FreeSurferBrainMaskFile}.${FinalfMRIResolution}.nii.gz - anat_brain_mask_to_func_res = pe.Node(interface=fsl.ApplyWarp(), - name=f'resample_anat_brain_mask_in_standard_{pipe_num}') - - anat_brain_mask_to_func_res.inputs.interp = 'nn' - anat_brain_mask_to_func_res.inputs.premat = cfg.registration_workflows[ - 'anatomical_registration']['registration']['FSL-FNIRT']['identity_matrix'] - - node, out = strat_pool.get_data('space-template_desc-T1w_mask') - wf.connect(node, out, anat_brain_mask_to_func_res, 'in_file') - - wf.connect(anat_brain_to_func_res, 'out_file', - anat_brain_mask_to_func_res, 'ref_file') + node, out = strat_pool.get_data('space-template_res-bold_desc-brain_T1w') + wf.connect(node, out, convert_func_to_standard_warp, 'reference') # TODO add condition: if no gradient distortion # https://github.com/DCAN-Labs/DCAN-HCP/blob/master/fMRIVolume/GenericfMRIVolumeProcessingPipeline.sh#L283-L284 @@ -3912,11 +3876,8 @@ def warp_timeseries_to_T1template_abcd(wf, cfg, strat_pool, pipe_num, opt=None convert_registration_warp.inputs.out_relwarp 
= True convert_registration_warp.inputs.relwarp = True - if strat_pool.check_rpool('space-template_res-bold_desc-brain_T1w'): - node, out = strat_pool.get_data('space-template_res-bold_desc-brain_T1w') - wf.connect(node, out, convert_registration_warp, 'reference') - else: - wf.connect(anat_brain_to_func_res, 'out_file', convert_registration_warp, 'reference') + node, out = strat_pool.get_data('space-template_res-bold_desc-brain_T1w') + wf.connect(node, out, convert_registration_warp, 'reference') wf.connect(convert_motion_distortion_warp, 'out_file', convert_registration_warp, 'warp1') @@ -3948,11 +3909,9 @@ def warp_timeseries_to_T1template_abcd(wf, cfg, strat_pool, pipe_num, opt=None wf.connect(convert_registration_warp, 'out_file', applywarp_func_to_standard, 'field_file') - if strat_pool.check_rpool('space-template_res-bold_desc-brain_T1w'): - node, out = strat_pool.get_data('space-template_res-bold_desc-brain_T1w') - wf.connect(node, out, applywarp_func_to_standard, 'reference') - else: - wf.connect(anat_brain_to_func_res, 'out_file', applywarp_func_to_standard, 'ref_file') + node, out = strat_pool.get_data('space-template_res-bold_desc-brain_T1w') + wf.connect(node, out, + applywarp_func_to_standard, 'ref_file') # applywarp --rel --interp=nn --in=${WD}/prevols/vol${vnum}_mask.nii.gz --warp=${MotionMatrixFolder}/${MotionMatrixPrefix}${vnum}_all_warp.nii.gz --ref=${WD}/${T1wImageFile}.${FinalfMRIResolution} --out=${WD}/postvols/vol${vnum}_mask.nii.gz applywarp_func_mask_to_standard = pe.MapNode(interface=fsl.ApplyWarp(), @@ -3968,11 +3927,9 @@ def warp_timeseries_to_T1template_abcd(wf, cfg, strat_pool, pipe_num, opt=None wf.connect(convert_registration_warp, 'out_file', applywarp_func_mask_to_standard, 'field_file') - if strat_pool.check_rpool('space-template_res-bold_desc-brain_T1w'): - node, out = strat_pool.get_data('space-template_res-bold_desc-brain_T1w') - wf.connect(node, out, applywarp_func_mask_to_standard, 'reference') - else: - wf.connect(anat_brain_to_func_res, 'out_file', applywarp_func_mask_to_standard, 'ref_file') + node, out = strat_pool.get_data('space-template_res-bold_desc-brain_T1w') + wf.connect(node, out, + applywarp_func_mask_to_standard, 'ref_file') ### Loop ends! 
### @@ -4012,11 +3969,8 @@ def warp_timeseries_to_T1template_abcd(wf, cfg, strat_pool, pipe_num, opt=None convert_dc_warp.inputs.out_relwarp = True convert_dc_warp.inputs.relwarp = True - if strat_pool.check_rpool('space-template_res-bold_desc-brain_T1w'): - node, out = strat_pool.get_data('space-template_res-bold_desc-brain_T1w') - wf.connect(node, out, convert_dc_warp, 'reference') - else: - wf.connect(anat_brain_to_func_res, 'out_file', convert_dc_warp, 'reference') + node, out = strat_pool.get_data('space-template_res-bold_desc-brain_T1w') + wf.connect(node, out, convert_dc_warp, 'reference') wf.connect(multiply_func_roi_by_zero, 'out_file', convert_dc_warp, 'warp1') @@ -4034,11 +3988,8 @@ def warp_timeseries_to_T1template_abcd(wf, cfg, strat_pool, pipe_num, opt=None node, out = strat_pool.get_data('motion-basefile') wf.connect(node, out, applywarp_scout, 'in_file') - if strat_pool.check_rpool('space-template_res-bold_desc-brain_T1w'): - node, out = strat_pool.get_data('space-template_res-bold_desc-brain_T1w') - wf.connect(node, out, applywarp_scout, 'reference') - else: - wf.connect(anat_brain_to_func_res, 'out_file', applywarp_scout, 'ref_file') + node, out = strat_pool.get_data('space-template_res-bold_desc-brain_T1w') + wf.connect(node, out, applywarp_scout, 'ref_file') wf.connect(convert_dc_warp, 'out_file', applywarp_scout, 'field_file') @@ -4047,11 +3998,9 @@ def warp_timeseries_to_T1template_abcd(wf, cfg, strat_pool, pipe_num, opt=None merge_func_mask = pe.Node(util.Merge(2), name=f'merge_func_mask_{pipe_num}') - if strat_pool.check_rpool('space-template_desc-bold_mask'): - node, out = strat_pool.get_data('space-template_desc-bold_mask') - wf.connect(node, out, merge_func_mask, 'in1') - else: - wf.connect(anat_brain_mask_to_func_res, 'out_file', merge_func_mask, 'in1') + node, out = strat_pool.get_data('space-template_desc-bold_mask') + wf.connect(node, out, merge_func_mask, 'in1') + wf.connect(find_min_mask, 'out_file', merge_func_mask, 'in2') extract_func_brain = pe.Node(interface=fsl.MultiImageMaths(), @@ -4080,7 +4029,6 @@ def warp_timeseries_to_T1template_abcd(wf, cfg, strat_pool, pipe_num, opt=None extract_scout_brain, 'operand_files') outputs = { - 'space-template_res-bold_desc-brain_T1w': (anat_brain_to_func_res, 'out_file'), 'space-template_desc-preproc_bold': (extract_func_brain, 'out_file'), 'space-template_desc-scout_bold': (extract_scout_brain, 'out_file'), 'space-template_desc-head_bold': (merge_func_to_standard, 'merged_file') diff --git a/CPAC/surface/surf_preproc.py b/CPAC/surface/surf_preproc.py index 806fbe486c..2b0cb64741 100755 --- a/CPAC/surface/surf_preproc.py +++ b/CPAC/surface/surf_preproc.py @@ -573,6 +573,7 @@ def run_surface(post_freesurfer_folder, "hemi-R_space-native_white", ], ) + def surface_postproc(wf, cfg, strat_pool, pipe_num, opt=None): surf = pe.Node(util.Function(input_names=['post_freesurfer_folder', From 72f24bc7dda5f912d2aded64979920991573a394 Mon Sep 17 00:00:00 2001 From: Elizabeth Kenneally Date: Thu, 18 Apr 2024 11:59:35 -0400 Subject: [PATCH 10/58] :bug: Fix bug in fs ingress commandline --- CPAC/pipeline/engine.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/CPAC/pipeline/engine.py b/CPAC/pipeline/engine.py index 42662ec3a0..09be47823a 100644 --- a/CPAC/pipeline/engine.py +++ b/CPAC/pipeline/engine.py @@ -1761,11 +1761,13 @@ def ingress_raw_anat_data(wf, rpool, cfg, data_paths, unique_id, part_id, def ingress_freesurfer(wf, rpool, cfg, data_paths, unique_id, part_id, ses_id): - if not 
cfg.pipeline_setup['freesurfer_dir']: + try: + fs_path = os.path.join(cfg.pipeline_setup['freesurfer_dir'], part_id) + except KeyError: print('No FreeSurfer data present.') return rpool - fs_path = os.path.join(cfg.pipeline_setup['freesurfer_dir'], part_id) + #fs_path = os.path.join(cfg.pipeline_setup['freesurfer_dir'], part_id) if not os.path.exists(fs_path): if 'sub' in part_id: fs_path = os.path.join(cfg.pipeline_setup['freesurfer_dir'], part_id.replace('sub-', '')) From 4818a203b5d7d648a8ee1c9794a05a3f1f3f1707 Mon Sep 17 00:00:00 2001 From: Biraj Shrestha Date: Thu, 18 Apr 2024 13:23:30 -0400 Subject: [PATCH 11/58] appending the nodeblock bold_mask_anatomical_resampled inside transform_timeseries_to_T1template_abcd if resource space-template_res-bold_desc-brain_T1w is missing --- CPAC/registration/registration.py | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/CPAC/registration/registration.py b/CPAC/registration/registration.py index 8658aae219..f57ed89f43 100644 --- a/CPAC/registration/registration.py +++ b/CPAC/registration/registration.py @@ -3783,6 +3783,12 @@ def warp_timeseries_to_T1template_abcd(wf, cfg, strat_pool, pipe_num, opt=None # https://github.com/DCAN-Labs/DCAN-HCP/blob/master/fMRIVolume/scripts/DistortionCorrectionAndEPIToT1wReg_FLIRTBBRAndFreeSurferBBRbased.sh#L548 # convertwarp --relout --rel -m ${WD}/fMRI2str.mat --ref=${T1wImage} --out=${WD}/fMRI2str.nii.gz + + if not strat_pool.check_rpool("space-template_res-bold_desc-brain_T1w"): + outputs = {} + else: + wf, outputs = bold_mask_anatomical_resampled(wf, cfg, strat_pool, pipe_num, opt) + convert_func_to_anat_linear_warp = pe.Node(interface=fsl.ConvertWarp(), name=f'convert_func_to_anat_linear_warp_{pipe_num}') @@ -4028,11 +4034,11 @@ def warp_timeseries_to_T1template_abcd(wf, cfg, strat_pool, pipe_num, opt=None wf.connect(merge_func_mask, 'out', extract_scout_brain, 'operand_files') - outputs = { + outputs.update({ 'space-template_desc-preproc_bold': (extract_func_brain, 'out_file'), 'space-template_desc-scout_bold': (extract_scout_brain, 'out_file'), 'space-template_desc-head_bold': (merge_func_to_standard, 'merged_file') - } + }) return (wf, outputs) From c493e80334cb04b66fdd5c4f01dfc66b232c0be1 Mon Sep 17 00:00:00 2001 From: Biraj Shrestha Date: Tue, 23 Apr 2024 00:21:25 -0400 Subject: [PATCH 12/58] Abstracted anat_brain_to_bold_res and anat_brain_mask_to_bold_res from bold_mask_anatomical_resampled as two separate sub worflows that can be reused --- CPAC/func_preproc/func_preproc.py | 80 ++++++++++++++++++++++--------- CPAC/registration/registration.py | 52 +++++++++++--------- 2 files changed, 87 insertions(+), 45 deletions(-) diff --git a/CPAC/func_preproc/func_preproc.py b/CPAC/func_preproc/func_preproc.py index e54d879c82..7dc2687763 100644 --- a/CPAC/func_preproc/func_preproc.py +++ b/CPAC/func_preproc/func_preproc.py @@ -1292,6 +1292,52 @@ def bold_mask_anatomical_based(wf, cfg, strat_pool, pipe_num, opt=None): return (wf, outputs) +def anat_brain_to_bold_res(wf_name, cfg, pipe_num): + + wf = pe.Workflow(name=f"{wf_name}_{pipe_num}") + + inputNode = pe.Node(util.IdentityInterface(fields=['T1w-template-funcreg', + 'space-template_desc-preproc_T1w']), + name='inputspec') + outputNode = pe.Node(util.IdentityInterface(fields=['space-template_res-bold_desc-brain_T1w']), + name='outputspec') + + # applywarp --rel --interp=spline -i ${T1wImage} -r ${ResampRefIm} --premat=$FSLDIR/etc/flirtsch/ident.mat -o ${WD}/${T1wImageFile}.${FinalfMRIResolution} + anat_brain_to_func_res = 
pe.Node(interface=fsl.ApplyWarp(), + name=f'resample_anat_brain_in_standard_{pipe_num}') + + anat_brain_to_func_res.inputs.interp = 'spline' + anat_brain_to_func_res.inputs.premat = cfg.registration_workflows[ + 'anatomical_registration']['registration']['FSL-FNIRT']['identity_matrix'] + + wf.connect(inputNode, 'space-template_desc-preproc_T1w', anat_brain_to_func_res, 'in_file') + wf.connect(inputNode, 'T1w-template-funcreg', anat_brain_to_func_res, 'ref_file') + + wf.connect(anat_brain_to_func_res, 'out_file', outputNode, 'space-template_res-bold_desc-brain_T1w') + return wf + +def anat_brain_mask_to_bold_res(wf_name, cfg, pipe_num): + # Create brain masks in this space from the FreeSurfer output (changing resolution) + # applywarp --rel --interp=nn -i ${FreeSurferBrainMask}.nii.gz -r ${WD}/${T1wImageFile}.${FinalfMRIResolution} --premat=$FSLDIR/etc/flirtsch/ident.mat -o ${WD}/${FreeSurferBrainMaskFile}.${FinalfMRIResolution}.nii.gz + wf = pe.Workflow(name=f"{wf_name}_{pipe_num}") + inputNode = pe.Node(util.IdentityInterface(fields=['space-template_desc-T1w_mask', + 'space-template_desc-preproc_T1w']), + name='inputspec') + outputNode = pe.Node(util.IdentityInterface(fields=['space-template_desc-bold_mask']), + name='outputspec') + + anat_brain_mask_to_func_res = pe.Node(interface=fsl.ApplyWarp(), + name=f'resample_anat_brain_mask_in_standard_{pipe_num}') + + anat_brain_mask_to_func_res.inputs.interp = 'nn' + anat_brain_mask_to_func_res.inputs.premat = cfg.registration_workflows[ + 'anatomical_registration']['registration']['FSL-FNIRT']['identity_matrix'] + + wf.connect(inputNode, 'space-template_desc-T1w_mask', anat_brain_mask_to_func_res, 'in_file') + wf.connect(inputNode, 'space-template_desc-preproc_T1w', anat_brain_mask_to_func_res, 'ref_file') + wf.connect(anat_brain_mask_to_func_res, 'out_file', outputNode, 'space-template_desc-bold_mask') + + return wf @nodeblock( name='bold_mask_anatomical_resampled', @@ -1308,34 +1354,23 @@ def bold_mask_anatomical_resampled(wf, cfg, strat_pool, pipe_num, opt=None): Adapted from `DCAN Lab's BOLD mask method from the ABCD pipeline `_. 
''' - # applywarp --rel --interp=spline -i ${T1wImage} -r ${ResampRefIm} --premat=$FSLDIR/etc/flirtsch/ident.mat -o ${WD}/${T1wImageFile}.${FinalfMRIResolution} - anat_brain_to_func_res = pe.Node(interface=fsl.ApplyWarp(), - name=f'resample_anat_brain_in_standard_{pipe_num}') - - anat_brain_to_func_res.inputs.interp = 'spline' - anat_brain_to_func_res.inputs.premat = cfg.registration_workflows[ - 'anatomical_registration']['registration']['FSL-FNIRT']['identity_matrix'] + anat_brain_to_func_res = anat_brain_to_bold_res(wf, cfg, pipe_num) node, out = strat_pool.get_data('space-template_desc-preproc_T1w') - wf.connect(node, out, anat_brain_to_func_res, 'in_file') + wf.connect(node, out, anat_brain_to_func_res, 'inputspec.space-template_desc-preproc_T1w') node, out = strat_pool.get_data('T1w-template-funcreg') - wf.connect(node, out, anat_brain_to_func_res, 'ref_file') + wf.connect(node, out, anat_brain_to_func_res, 'inputspec.T1w-template-funcreg') # Create brain masks in this space from the FreeSurfer output (changing resolution) # applywarp --rel --interp=nn -i ${FreeSurferBrainMask}.nii.gz -r ${WD}/${T1wImageFile}.${FinalfMRIResolution} --premat=$FSLDIR/etc/flirtsch/ident.mat -o ${WD}/${FreeSurferBrainMaskFile}.${FinalfMRIResolution}.nii.gz - anat_brain_mask_to_func_res = pe.Node(interface=fsl.ApplyWarp(), - name=f'resample_anat_brain_mask_in_standard_{pipe_num}') - - anat_brain_mask_to_func_res.inputs.interp = 'nn' - anat_brain_mask_to_func_res.inputs.premat = cfg.registration_workflows[ - 'anatomical_registration']['registration']['FSL-FNIRT']['identity_matrix'] - + anat_brain_mask_to_func_res = anat_brain_mask_to_bold_res(wf_name='anat_brain_mask_to_bold_res', cfg=cfg, pipe_num=pipe_num) + node, out = strat_pool.get_data('space-template_desc-T1w_mask') - wf.connect(node, out, anat_brain_mask_to_func_res, 'in_file') + wf.connect(node, out, anat_brain_mask_to_func_res, 'inputspec.space-template_desc-T1w_mask') - wf.connect(anat_brain_to_func_res, 'out_file', - anat_brain_mask_to_func_res, 'ref_file') + wf.connect(anat_brain_to_func_res, 'outputspec.space-template_res-bold_desc-brain_T1w', + anat_brain_mask_to_func_res, 'inputspec.space-template_desc-preproc_T1w') # Resample func mask in template space back to native space func_mask_template_to_native = pe.Node( @@ -1346,21 +1381,20 @@ def bold_mask_anatomical_resampled(wf, cfg, strat_pool, pipe_num, opt=None): func_mask_template_to_native.inputs.resample_mode = 'NN' func_mask_template_to_native.inputs.outputtype = 'NIFTI_GZ' - wf.connect(anat_brain_mask_to_func_res, 'out_file', + wf.connect(anat_brain_mask_to_func_res, 'outputspec.space-template_desc-bold_mask', func_mask_template_to_native, 'in_file') node, out = strat_pool.get_data("desc-preproc_bold") wf.connect(node, out, func_mask_template_to_native, 'master') outputs = { - 'space-template_res-bold_desc-brain_T1w': (anat_brain_to_func_res, 'out_file'), - 'space-template_desc-bold_mask': (anat_brain_mask_to_func_res, 'out_file'), + 'space-template_res-bold_desc-brain_T1w': (anat_brain_to_func_res, 'outputspec.space-template_res-bold_desc-brain_T1w'), + 'space-template_desc-bold_mask': (anat_brain_mask_to_func_res, 'outputspec.space-template_desc-bold_mask'), 'space-bold_desc-brain_mask': (func_mask_template_to_native, 'out_file') } return (wf, outputs) - @nodeblock( name='bold_mask_ccs', switch=[['functional_preproc', 'run'], diff --git a/CPAC/registration/registration.py b/CPAC/registration/registration.py index f57ed89f43..fbd386b4a7 100644 --- a/CPAC/registration/registration.py 
+++ b/CPAC/registration/registration.py @@ -3766,6 +3766,9 @@ def warp_timeseries_to_T1template_deriv(wf, cfg, strat_pool, pipe_num, "space-template_res-bold_desc-brain_T1w", "space-template_desc-bold_mask", "T1w-brain-template-funcreg", + "T1w-template-funcreg", + "space-template_desc-preproc_T1w", + "space-template_desc-T1w_mask", ], outputs={ "space-template_desc-preproc_bold": { @@ -3783,12 +3786,6 @@ def warp_timeseries_to_T1template_abcd(wf, cfg, strat_pool, pipe_num, opt=None # https://github.com/DCAN-Labs/DCAN-HCP/blob/master/fMRIVolume/scripts/DistortionCorrectionAndEPIToT1wReg_FLIRTBBRAndFreeSurferBBRbased.sh#L548 # convertwarp --relout --rel -m ${WD}/fMRI2str.mat --ref=${T1wImage} --out=${WD}/fMRI2str.nii.gz - - if not strat_pool.check_rpool("space-template_res-bold_desc-brain_T1w"): - outputs = {} - else: - wf, outputs = bold_mask_anatomical_resampled(wf, cfg, strat_pool, pipe_num, opt) - convert_func_to_anat_linear_warp = pe.Node(interface=fsl.ConvertWarp(), name=f'convert_func_to_anat_linear_warp_{pipe_num}') @@ -3822,8 +3819,17 @@ def warp_timeseries_to_T1template_abcd(wf, cfg, strat_pool, pipe_num, opt=None node, out = strat_pool.get_data('from-T1w_to-template_mode-image_xfm') wf.connect(node, out, convert_func_to_standard_warp, 'warp2') - node, out = strat_pool.get_data('space-template_res-bold_desc-brain_T1w') - wf.connect(node, out, convert_func_to_standard_warp, 'reference') + + from CPAC.func_preproc.func_preproc import anat_brain_to_bold_res, anat_brain_mask_to_bold_res + anat_brain_to_func_res = anat_brain_to_bold_res(wf, cfg, pipe_num) + + node, out = strat_pool.get_data('space-template_desc-preproc_T1w') + wf.connect(node, out, anat_brain_to_func_res, 'inputspec.space-template_desc-preproc_T1w') + + node, out = strat_pool.get_data('T1w-template-funcreg') + wf.connect(node, out, anat_brain_to_func_res, 'inputspec.T1w-template-funcreg') + + wf.connect(anat_brain_to_func_res, 'outputspec.space-template_res-bold_desc-brain_T1w', convert_func_to_standard_warp, 'reference') # TODO add condition: if no gradient distortion # https://github.com/DCAN-Labs/DCAN-HCP/blob/master/fMRIVolume/GenericfMRIVolumeProcessingPipeline.sh#L283-L284 @@ -3882,8 +3888,7 @@ def warp_timeseries_to_T1template_abcd(wf, cfg, strat_pool, pipe_num, opt=None convert_registration_warp.inputs.out_relwarp = True convert_registration_warp.inputs.relwarp = True - node, out = strat_pool.get_data('space-template_res-bold_desc-brain_T1w') - wf.connect(node, out, convert_registration_warp, 'reference') + wf.connect(anat_brain_to_func_res, 'outputspec.space-template_res-bold_desc-brain_T1w', convert_registration_warp, 'reference') wf.connect(convert_motion_distortion_warp, 'out_file', convert_registration_warp, 'warp1') @@ -3915,8 +3920,7 @@ def warp_timeseries_to_T1template_abcd(wf, cfg, strat_pool, pipe_num, opt=None wf.connect(convert_registration_warp, 'out_file', applywarp_func_to_standard, 'field_file') - node, out = strat_pool.get_data('space-template_res-bold_desc-brain_T1w') - wf.connect(node, out, + wf.connect(anat_brain_to_func_res, 'outputspec.space-template_res-bold_desc-brain_T1w', applywarp_func_to_standard, 'ref_file') # applywarp --rel --interp=nn --in=${WD}/prevols/vol${vnum}_mask.nii.gz --warp=${MotionMatrixFolder}/${MotionMatrixPrefix}${vnum}_all_warp.nii.gz --ref=${WD}/${T1wImageFile}.${FinalfMRIResolution} --out=${WD}/postvols/vol${vnum}_mask.nii.gz @@ -3933,8 +3937,7 @@ def warp_timeseries_to_T1template_abcd(wf, cfg, strat_pool, pipe_num, opt=None wf.connect(convert_registration_warp, 
'out_file', applywarp_func_mask_to_standard, 'field_file') - node, out = strat_pool.get_data('space-template_res-bold_desc-brain_T1w') - wf.connect(node, out, + wf.connect(anat_brain_to_func_res, 'outputspec.space-template_res-bold_desc-brain_T1w', applywarp_func_mask_to_standard, 'ref_file') ### Loop ends! ### @@ -3975,8 +3978,7 @@ def warp_timeseries_to_T1template_abcd(wf, cfg, strat_pool, pipe_num, opt=None convert_dc_warp.inputs.out_relwarp = True convert_dc_warp.inputs.relwarp = True - node, out = strat_pool.get_data('space-template_res-bold_desc-brain_T1w') - wf.connect(node, out, convert_dc_warp, 'reference') + wf.connect(anat_brain_to_func_res, 'outputspec.space-template_res-bold_desc-brain_T1w', convert_dc_warp, 'reference') wf.connect(multiply_func_roi_by_zero, 'out_file', convert_dc_warp, 'warp1') @@ -3994,8 +3996,7 @@ def warp_timeseries_to_T1template_abcd(wf, cfg, strat_pool, pipe_num, opt=None node, out = strat_pool.get_data('motion-basefile') wf.connect(node, out, applywarp_scout, 'in_file') - node, out = strat_pool.get_data('space-template_res-bold_desc-brain_T1w') - wf.connect(node, out, applywarp_scout, 'ref_file') + wf.connect(anat_brain_to_func_res, 'outputspec.space-template_res-bold_desc-brain_T1w', applywarp_scout, 'ref_file') wf.connect(convert_dc_warp, 'out_file', applywarp_scout, 'field_file') @@ -4004,8 +4005,15 @@ def warp_timeseries_to_T1template_abcd(wf, cfg, strat_pool, pipe_num, opt=None merge_func_mask = pe.Node(util.Merge(2), name=f'merge_func_mask_{pipe_num}') - node, out = strat_pool.get_data('space-template_desc-bold_mask') - wf.connect(node, out, merge_func_mask, 'in1') + anat_brain_mask_to_func_res = anat_brain_mask_to_bold_res(wf_name='anat_brain_mask_to_bold_res', cfg=cfg, pipe_num=pipe_num) + + node, out = strat_pool.get_data('space-template_desc-T1w_mask') + wf.connect(node, out, anat_brain_mask_to_func_res, 'inputspec.space-template_desc-T1w_mask') + + wf.connect(anat_brain_to_func_res, 'outputspec.space-template_res-bold_desc-brain_T1w', + anat_brain_mask_to_func_res, 'inputspec.space-template_desc-preproc_T1w') + + wf.connect(anat_brain_mask_to_func_res, 'outputspec.space-template_desc-bold_mask', merge_func_mask, 'in1') wf.connect(find_min_mask, 'out_file', merge_func_mask, 'in2') @@ -4034,11 +4042,11 @@ def warp_timeseries_to_T1template_abcd(wf, cfg, strat_pool, pipe_num, opt=None wf.connect(merge_func_mask, 'out', extract_scout_brain, 'operand_files') - outputs.update({ + outputs = { 'space-template_desc-preproc_bold': (extract_func_brain, 'out_file'), 'space-template_desc-scout_bold': (extract_scout_brain, 'out_file'), 'space-template_desc-head_bold': (merge_func_to_standard, 'merged_file') - }) + } return (wf, outputs) From a2f8d231df540750bdc0b7fc79c138fbb94fece7 Mon Sep 17 00:00:00 2001 From: Biraj Shrestha Date: Fri, 10 May 2024 10:08:07 -0400 Subject: [PATCH 13/58] added template space warped mask as an output in the ANTs Registration nodeblock --- CPAC/registration/registration.py | 12 ++++++++++-- 1 file changed, 10 insertions(+), 2 deletions(-) diff --git a/CPAC/registration/registration.py b/CPAC/registration/registration.py index 9833c7ee24..0ab7dfee64 100644 --- a/CPAC/registration/registration.py +++ b/CPAC/registration/registration.py @@ -1181,7 +1181,8 @@ def create_wf_calculate_ants_warp( 'inverse_warp_field', 'composite_transform', 'wait', - 'normalized_output_brain']), name='outputspec') + 'normalized_output_brain', + 'normalized_output_brain_mask']), name='outputspec') # use ANTS to warp the masked anatomical image to a 
template image ''' @@ -1204,7 +1205,8 @@ def create_wf_calculate_ants_warp( 'interp', 'reg_with_skull'], output_names=['warp_list', - 'warped_image'], + 'warped_image', + 'warped_mask'], function=hardcoded_reg, imports=reg_imports), name='calc_ants_warp', @@ -1343,6 +1345,9 @@ def create_wf_calculate_ants_warp( calculate_ants_warp, 'warped_image', outputspec, 'normalized_output_brain') + calc_ants_warp_wf.connect( + calculate_ants_warp, 'warped_mask', + outputspec, 'normalized_output_brain_mask') return calc_ants_warp_wf @@ -1785,6 +1790,8 @@ def ANTs_registration_connector(wf_name, cfg, params, orig="T1w", outputs = { f'space-{sym}template_desc-preproc_{orig}': ( ants_reg_anat_mni, 'outputspec.normalized_output_brain'), + f'space-{sym}template_desc-{orig}_mask': ( + ants_reg_anat_mni, 'outputspec.normalized_output_brain_mask'), f'from-{orig}_to-{sym}{tmpl}template_mode-image_xfm': ( write_composite_xfm, 'output_image'), f'from-{sym}{tmpl}template_to-{orig}_mode-image_xfm': ( @@ -2223,6 +2230,7 @@ def register_FSL_EPI_to_template(wf, cfg, strat_pool, pipe_num, opt=None): "template space.", "Template": "T1w-template", }, + "space-template_desc-T1w_mask": {"Template": "T1w-template"}, "from-T1w_to-template_mode-image_desc-linear_xfm": { "Description": "Linear (affine) transform from T1w native space " "to T1w-template space.", From 5ca3a1b1a8e1344418b183ec431e972b16dafb2e Mon Sep 17 00:00:00 2001 From: Biraj Shrestha Date: Fri, 10 May 2024 10:37:29 -0400 Subject: [PATCH 14/58] Edited ANTs registration to output the template mask file as well --- CPAC/registration/registration.py | 27 ++++++++++++++++++--------- 1 file changed, 18 insertions(+), 9 deletions(-) diff --git a/CPAC/registration/registration.py b/CPAC/registration/registration.py index 0ab7dfee64..4bf84927d8 100644 --- a/CPAC/registration/registration.py +++ b/CPAC/registration/registration.py @@ -1181,8 +1181,7 @@ def create_wf_calculate_ants_warp( 'inverse_warp_field', 'composite_transform', 'wait', - 'normalized_output_brain', - 'normalized_output_brain_mask']), name='outputspec') + 'normalized_output_brain']), name='outputspec') # use ANTS to warp the masked anatomical image to a template image ''' @@ -1205,8 +1204,7 @@ def create_wf_calculate_ants_warp( 'interp', 'reg_with_skull'], output_names=['warp_list', - 'warped_image', - 'warped_mask'], + 'warped_image'], function=hardcoded_reg, imports=reg_imports), name='calc_ants_warp', @@ -1345,9 +1343,6 @@ def create_wf_calculate_ants_warp( calculate_ants_warp, 'warped_image', outputspec, 'normalized_output_brain') - calc_ants_warp_wf.connect( - calculate_ants_warp, 'warped_mask', - outputspec, 'normalized_output_brain_mask') return calc_ants_warp_wf @@ -1790,8 +1785,6 @@ def ANTs_registration_connector(wf_name, cfg, params, orig="T1w", outputs = { f'space-{sym}template_desc-preproc_{orig}': ( ants_reg_anat_mni, 'outputspec.normalized_output_brain'), - f'space-{sym}template_desc-{orig}_mask': ( - ants_reg_anat_mni, 'outputspec.normalized_output_brain_mask'), f'from-{orig}_to-{sym}{tmpl}template_mode-image_xfm': ( write_composite_xfm, 'output_image'), f'from-{sym}{tmpl}template_to-{orig}_mode-image_xfm': ( @@ -2355,6 +2348,22 @@ def register_ANTs_anat_to_template(wf, cfg, strat_pool, pipe_num, opt=None): outputs[new_key] = outputs[key] del outputs[key] + ants_apply_warp_t1_brain_mask_to_template = pe.Node(interface=ants.ApplyTransforms(), + name=f'ANTS-ABCD_T1_to_template_{pipe_num}') + ants_apply_warp_t1_brain_mask_to_template.inputs.dimension = 3 + 
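# For reference, the mask-warping step being added here boils down to the
# following standalone nipype call -- a minimal sketch, assuming placeholder
# filenames, with NearestNeighbor interpolation (the usual choice for a
# binary mask; the node above relies on the interface defaults instead):
from nipype.interfaces import ants

warp_mask = ants.ApplyTransforms()
warp_mask.inputs.dimension = 3
warp_mask.inputs.input_image = "space-T1w_desc-brain_mask.nii.gz"  # placeholder
warp_mask.inputs.reference_image = "T1w-template.nii.gz"  # placeholder
warp_mask.inputs.transforms = ["from-T1w_to-template_mode-image_xfm.nii.gz"]  # placeholder
warp_mask.inputs.interpolation = "NearestNeighbor"
warp_mask.inputs.output_image = "space-template_desc-brain_mask.nii.gz"
warp_mask.run()  # requires antsApplyTransforms on $PATH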
ants_apply_warp_t1_brain_mask_to_template.inputs.print_out_composite_warp_file = True + ants_apply_warp_t1_brain_mask_to_template.inputs.output_image = 'ANTs_CombinedInvWarp.nii.gz' + + node, out = strat_pool.get_data(['space-T1w_desc-brain_mask']) + wf.connect(node, out, ants_apply_warp_t1_brain_mask_to_template, 'input_image') + + node, out = strat_pool.get_data('T1w-template') + wf.connect(node, out, ants_apply_warp_t1_brain_mask_to_template, 'reference_image') + + _, out = outputs['from-T1w_to-template_mode-image_xfm'] + wf.connect(_, out, ants_apply_warp_t1_brain_mask_to_template, 'transforms') + outputs.update({'space-template_desc-T1w_mask': (ants_apply_warp_t1_brain_mask_to_template, 'output_image')}) + return (wf, outputs) From cc285fdf19c40b3e455f0feacfcc70473e88693e Mon Sep 17 00:00:00 2001 From: Biraj Shrestha Date: Fri, 10 May 2024 12:04:16 -0400 Subject: [PATCH 15/58] Revert "added template space warped mask as an output in the ANTs Registration nodeblock" This reverts commit a2f8d231df540750bdc0b7fc79c138fbb94fece7. --- CPAC/registration/registration.py | 1 - 1 file changed, 1 deletion(-) diff --git a/CPAC/registration/registration.py b/CPAC/registration/registration.py index 4bf84927d8..415c0a5ddc 100644 --- a/CPAC/registration/registration.py +++ b/CPAC/registration/registration.py @@ -2223,7 +2223,6 @@ def register_FSL_EPI_to_template(wf, cfg, strat_pool, pipe_num, opt=None): "template space.", "Template": "T1w-template", }, - "space-template_desc-T1w_mask": {"Template": "T1w-template"}, "from-T1w_to-template_mode-image_desc-linear_xfm": { "Description": "Linear (affine) transform from T1w native space " "to T1w-template space.", From 66894a40f375b2ec0d4c7cb7ce33b488f559bf3b Mon Sep 17 00:00:00 2001 From: Biraj Shrestha Date: Fri, 10 May 2024 15:23:17 -0400 Subject: [PATCH 16/58] changed bold_mask_anatomical_resampled to take in brain_mask instead of T1w mask --- CPAC/func_preproc/func_preproc.py | 4 ++-- CPAC/registration/registration.py | 21 ++------------------- 2 files changed, 4 insertions(+), 21 deletions(-) diff --git a/CPAC/func_preproc/func_preproc.py b/CPAC/func_preproc/func_preproc.py index 7dc2687763..b2f9e49ae1 100644 --- a/CPAC/func_preproc/func_preproc.py +++ b/CPAC/func_preproc/func_preproc.py @@ -1346,7 +1346,7 @@ def anat_brain_mask_to_bold_res(wf_name, cfg, pipe_num): option_key=['functional_preproc', 'func_masking', 'using'], option_val='Anatomical_Resampled', inputs=['desc-preproc_bold', 'T1w-template-funcreg', 'space-template_desc-preproc_T1w', - 'space-template_desc-T1w_mask'], + 'space-template_desc-brain_mask'], outputs=['space-template_res-bold_desc-brain_T1w', 'space-template_desc-bold_mask', 'space-bold_desc-brain_mask'] ) def bold_mask_anatomical_resampled(wf, cfg, strat_pool, pipe_num, opt=None): @@ -1366,7 +1366,7 @@ def bold_mask_anatomical_resampled(wf, cfg, strat_pool, pipe_num, opt=None): # applywarp --rel --interp=nn -i ${FreeSurferBrainMask}.nii.gz -r ${WD}/${T1wImageFile}.${FinalfMRIResolution} --premat=$FSLDIR/etc/flirtsch/ident.mat -o ${WD}/${FreeSurferBrainMaskFile}.${FinalfMRIResolution}.nii.gz anat_brain_mask_to_func_res = anat_brain_mask_to_bold_res(wf_name='anat_brain_mask_to_bold_res', cfg=cfg, pipe_num=pipe_num) - node, out = strat_pool.get_data('space-template_desc-T1w_mask') + node, out = strat_pool.get_data('space-template_desc-brain_mask') wf.connect(node, out, anat_brain_mask_to_func_res, 'inputspec.space-template_desc-T1w_mask') wf.connect(anat_brain_to_func_res, 'outputspec.space-template_res-bold_desc-brain_T1w', diff 
--git a/CPAC/registration/registration.py b/CPAC/registration/registration.py index 415c0a5ddc..a0a1604d7c 100644 --- a/CPAC/registration/registration.py +++ b/CPAC/registration/registration.py @@ -2346,23 +2346,6 @@ def register_ANTs_anat_to_template(wf, cfg, strat_pool, pipe_num, opt=None): f'{direction}-longitudinal') outputs[new_key] = outputs[key] del outputs[key] - - ants_apply_warp_t1_brain_mask_to_template = pe.Node(interface=ants.ApplyTransforms(), - name=f'ANTS-ABCD_T1_to_template_{pipe_num}') - ants_apply_warp_t1_brain_mask_to_template.inputs.dimension = 3 - ants_apply_warp_t1_brain_mask_to_template.inputs.print_out_composite_warp_file = True - ants_apply_warp_t1_brain_mask_to_template.inputs.output_image = 'ANTs_CombinedInvWarp.nii.gz' - - node, out = strat_pool.get_data(['space-T1w_desc-brain_mask']) - wf.connect(node, out, ants_apply_warp_t1_brain_mask_to_template, 'input_image') - - node, out = strat_pool.get_data('T1w-template') - wf.connect(node, out, ants_apply_warp_t1_brain_mask_to_template, 'reference_image') - - _, out = outputs['from-T1w_to-template_mode-image_xfm'] - wf.connect(_, out, ants_apply_warp_t1_brain_mask_to_template, 'transforms') - outputs.update({'space-template_desc-T1w_mask': (ants_apply_warp_t1_brain_mask_to_template, 'output_image')}) - return (wf, outputs) @@ -3752,7 +3735,7 @@ def warp_timeseries_to_T1template_deriv(wf, cfg, strat_pool, pipe_num, "T1w-brain-template-funcreg", "T1w-template-funcreg", "space-template_desc-preproc_T1w", - "space-template_desc-T1w_mask", + "space-template_desc-brain_mask", ], outputs={ "space-template_desc-preproc_bold": { @@ -3991,7 +3974,7 @@ def warp_timeseries_to_T1template_abcd(wf, cfg, strat_pool, pipe_num, opt=None anat_brain_mask_to_func_res = anat_brain_mask_to_bold_res(wf_name='anat_brain_mask_to_bold_res', cfg=cfg, pipe_num=pipe_num) - node, out = strat_pool.get_data('space-template_desc-T1w_mask') + node, out = strat_pool.get_data('space-template_desc-brain_mask') wf.connect(node, out, anat_brain_mask_to_func_res, 'inputspec.space-template_desc-T1w_mask') wf.connect(anat_brain_to_func_res, 'outputspec.space-template_res-bold_desc-brain_T1w', From dc41bf4f94da07dd78aeaf2fb894e11999f34748 Mon Sep 17 00:00:00 2001 From: Biraj Shrestha Date: Mon, 13 May 2024 18:06:52 -0400 Subject: [PATCH 17/58] input bold data to node swapped with reorient-bold to resolve RAI/RPI bold/mask mismatch issue --- CPAC/registration/registration.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/CPAC/registration/registration.py b/CPAC/registration/registration.py index a0a1604d7c..5a957bda0d 100644 --- a/CPAC/registration/registration.py +++ b/CPAC/registration/registration.py @@ -3723,7 +3723,7 @@ def warp_timeseries_to_T1template_deriv(wf, cfg, strat_pool, pipe_num, option_key=["apply_transform", "using"], option_val="abcd", inputs=[ - ("desc-preproc_bold", "bold", "motion-basefile", + ("desc-preproc_bold", "desc-reorient_bold", "motion-basefile", "coordinate-transformation"), "from-T1w_to-template_mode-image_xfm", "from-bold_to-T1w_mode-image_desc-linear_xfm", @@ -3807,7 +3807,7 @@ def warp_timeseries_to_T1template_abcd(wf, cfg, strat_pool, pipe_num, opt=None extract_func_roi.inputs.t_min = 0 extract_func_roi.inputs.t_size = 3 - node, out = strat_pool.get_data('bold') + node, out = strat_pool.get_data('desc-reorient_bold') wf.connect(node, out, extract_func_roi, 'in_file') # fslmaths "$fMRIFolder"/"$NameOffMRI"_gdc_warp -mul 0 "$fMRIFolder"/"$NameOffMRI"_gdc_warp @@ -3826,7 +3826,7 @@ def 
warp_timeseries_to_T1template_abcd(wf, cfg, strat_pool, pipe_num, opt=None split_func.inputs.dimension = 't' - node, out = strat_pool.get_data('bold') + node, out = strat_pool.get_data('desc-reorient_bold') wf.connect(node, out, split_func, 'in_file') ### Loop starts! ### From 5d6a9ca5008cc6cd43e411e965fc82042c0bb02b Mon Sep 17 00:00:00 2001 From: Jon Clucas Date: Mon, 4 Nov 2024 23:03:43 -0500 Subject: [PATCH 18/58] :rotating_light: Lint in preparation for merge from --- .github/scripts/autoversioning.sh | 26 +- .pre-commit-config.yaml | 47 + .ruff.toml | 46 + CHANGELOG.md | 4 +- CPAC/anat_preproc/anat_preproc.py | 3179 ++++++----- CPAC/anat_preproc/lesion_preproc.py | 74 +- CPAC/func_preproc/func_preproc.py | 1914 ++++--- .../longitudinal_workflow.py | 1090 ++-- CPAC/pipeline/engine.py | 2354 ++++---- CPAC/pipeline/schema.py | 2070 +++---- CPAC/pipeline/test/test_engine.py | 136 +- CPAC/registration/registration.py | 4991 +++++++++-------- CPAC/registration/tests/mocks.py | 190 +- CPAC/registration/tests/test_registration.py | 196 +- .../configs/pipeline_config_blank.yml | 3 +- .../configs/pipeline_config_default.yml | 88 +- CPAC/resources/tests/test_templates.py | 31 +- CPAC/utils/datasource.py | 1440 ++--- CPAC/utils/test_mocks.py | 355 +- dev/docker_data/run.py | 1150 ++-- 20 files changed, 10581 insertions(+), 8803 deletions(-) create mode 100644 .ruff.toml diff --git a/.github/scripts/autoversioning.sh b/.github/scripts/autoversioning.sh index 0543f626a1..f93dc3f57e 100755 --- a/.github/scripts/autoversioning.sh +++ b/.github/scripts/autoversioning.sh @@ -18,7 +18,14 @@ # License along with C-PAC. If not, see . # Update version comment strings -cd CPAC +function wait_for_git_lock() { + while [ -f "./.git/index.lock" ]; do + echo "Waiting for the git lock file to be removed..." + sleep 1 + done +} + +cd CPAC || exit 1 VERSION=$(python -c "from info import __version__; print(('.'.join(('.'.join(__version__[::-1].split('-')[1].split('.')[1:])[::-1], __version__.split('-')[1])) if '-' in __version__ else __version__).split('+', 1)[0])") cd .. 
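# The wait_for_git_lock helper introduced above serializes the git calls
# below by polling for git's index lockfile. A minimal Python sketch of the
# same pattern (the path and 1-second interval mirror the shell function;
# the repo_root argument is illustrative):
import os
import time

def wait_for_git_lock(repo_root="."):
    """Block until git's index.lock disappears."""
    lock_file = os.path.join(repo_root, ".git", "index.lock")
    while os.path.exists(lock_file):
        print("Waiting for the git lock file to be removed...")  # mirrors the shell echo
        time.sleep(1)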
echo "v${VERSION}" > version @@ -30,8 +37,8 @@ else # Linux and others find ./CPAC/resources/configs -name "*.yml" -exec sed -i'' -r "${_SED_COMMAND}" {} \; fi -git add version -VERSIONS=( `git show $(git log --pretty=format:'%h' -n 2 version | tail -n 1):version` `cat version` ) +wait_for_git_lock && git add version +VERSIONS=( `git show $(git log --pretty=format:'%h' -n 1 version | tail -n 1):version` `cat version` ) export PATTERN="(declare|typeset) -a" if [[ "$(declare -p VERSIONS)" =~ $PATTERN ]] then @@ -52,11 +59,12 @@ then done unset IFS fi -git add CPAC/resources/configs .github/Dockerfiles +wait_for_git_lock && git add CPAC/resources/configs .github/Dockerfiles # Overwrite top-level Dockerfiles with the CI Dockerfiles -cp .github/Dockerfiles/C-PAC.develop-jammy.Dockerfile Dockerfile -cp .github/Dockerfiles/C-PAC.develop-ABCD-HCP-bionic.Dockerfile variant-ABCD-HCP.Dockerfile -cp .github/Dockerfiles/C-PAC.develop-fMRIPrep-LTS-xenial.Dockerfile variant-fMRIPrep-LTS.Dockerfile -cp .github/Dockerfiles/C-PAC.develop-lite-jammy.Dockerfile variant-lite.Dockerfile -git add *Dockerfile \ No newline at end of file +wait_for_git_lock && cp .github/Dockerfiles/C-PAC.develop-jammy.Dockerfile Dockerfile +wait_for_git_lock && cp .github/Dockerfiles/C-PAC.develop-lite-jammy.Dockerfile variant-lite.Dockerfile +for DOCKERFILE in $(ls *Dockerfile) +do + wait_for_git_lock && git add $DOCKERFILE +done diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 7357d71417..66b0a5da0e 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -14,8 +14,55 @@ # You should have received a copy of the GNU Lesser General Public # License along with C-PAC. If not, see . +ci: + skip: [ruff, update-yaml-comments] + +fail_fast: false repos: + - repo: https://github.com/astral-sh/ruff-pre-commit + rev: v0.3.2 + hooks: + - id: ruff + args: [--fix] + - id: ruff-format + + - repo: https://github.com/pre-commit/pre-commit-hooks + rev: v4.5.0 + hooks: + - id: check-case-conflict + - id: end-of-file-fixer + exclude: '.*\.?sv|.*\.pkl(z)?' 
+ - id: mixed-line-ending + args: + - --fix=lf + - id: trailing-whitespace + exclude: '.*\.tsv' + - id: check-json + - id: pretty-format-json + args: + - --autofix + - --indent=4 + - --no-sort-keys + - id: check-merge-conflict + - id: check-yaml + - id: check-toml + + - repo: https://github.com/macisamuele/language-formatters-pre-commit-hooks + rev: v2.12.0 + hooks: + # pretty-format-yaml is disabled because it doesn't support the "YAML" directive https://yaml.org/spec/1.1/#id895631 + # - id: pretty-format-yaml + # args: + # - --autofix + # - --indent=2 + - id: pretty-format-toml + exclude: ^poetry.lock$ + args: + - --autofix + - --indent=2 + - --no-sort + - repo: local hooks: - id: autoversioning diff --git a/.ruff.toml b/.ruff.toml new file mode 100644 index 0000000000..265427a1ab --- /dev/null +++ b/.ruff.toml @@ -0,0 +1,46 @@ +extend-exclude = ["dev/docker_data/get-pip_23.0.1.py"] +target-version = "py310" + +[format] +line-ending = "auto" # proposed setting to add next release: line-ending = "lf" + +[lint] +extend-select = ["A", "C4", "D", "EM", "F541", "G", "I", "ICN", "NPY", "PL", "RET", "RSE", "RUF", "Q", "T20", "UP032", "W"] # proposed rules to add next release cycle: ["B904", "LOG007", "TRY002", "TRY201", "TRY400", "TRY401"] +external = ["T20"] # Don't autoremove 'noqa` comments for these rules + +[lint.per-file-ignores] +"CPAC/func_preproc/func_preproc.py" = ["E402"] +"CPAC/utils/sklearn.py" = ["RUF003"] +"CPAC/utils/tests/old_functions.py" = ["C", "D", "E", "EM", "PLW", "RET"] +"CPAC/utils/utils.py" = ["T201"] # until `repickle` is removed +"setup.py" = ["D1"] + +[lint.flake8-import-conventions.extend-aliases] +"CPAC.pipeline.cpac_group_runner" = "cgr" +"nibabel" = "nib" +"nipype.interfaces.io" = "nio" +"networkx" = "nx" +"pkg_resources" = "p" +"CPAC.pipeline.nipype_pipeline_engine" = "pe" + +[lint.isort] +combine-as-imports = true +force-sort-within-sections = true +known-first-party = ["CPAC"] +no-lines-before = ["collab", "other-first-party", "local-folder"] +order-by-type = false +section-order = ["future", "standard-library", "third-party", "collab", "other-first-party", "first-party", "local-folder"] + +[lint.isort.sections] +"collab" = ["nibabel", "nilearn", "nipype", "PyBASC", "pybids", "scipy", "spython"] +"other-first-party" = ["flowdump", "indi_aws", "indi_schedulers", "PyPEER"] + +[lint.pydocstyle] +convention = "numpy" +ignore-decorators = ["CPAC.utils.docs.docstring_parameter"] + +[lint.pylint] +max-args = 10 +max-branches = 50 +max-returns = 12 +max-statements = 100 diff --git a/CHANGELOG.md b/CHANGELOG.md index 08878d2708..e8a23221ea 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -18,7 +18,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ### Added -- `Robustfov` feature in `FSL-BET` to crop images ensuring removal of neck regions that may appear in the skull-stripped images. +- `Robustfov` feature in `FSL-BET` to crop images ensuring removal of neck regions that may appear in the skull-stripped images. 
- Ability to throttle nodes, estimating all available memory when threading ### Changed @@ -236,7 +236,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 - In a given pipeline configuration, segmentation probability maps and binary tissue masks are warped to template space, and those warped masks are included in the output directory - if `registration_workflows['functional_registration']['EPI_registration']['run segmentation']` is `On` and `segmentation['tissue_segmentation']['Template_Based']['template_for_segmentation']` includes `EPI_Template` - + and/or - if `registration_workflows['anatomical_registration']['run']` is `On` and `segmentation['tissue_segmentation']['Template_Based']['template_for_segmentation']` includes `T1_Template` - Renamed connectivity matrices from `*_connectome.tsv` to `*_correlations.tsv` diff --git a/CPAC/anat_preproc/anat_preproc.py b/CPAC/anat_preproc/anat_preproc.py index c903f54896..b37aebe003 100644 --- a/CPAC/anat_preproc/anat_preproc.py +++ b/CPAC/anat_preproc/anat_preproc.py @@ -17,408 +17,497 @@ # License along with C-PAC. If not, see . # from copy import deepcopy import os -from CPAC.pipeline.nodeblock import nodeblock -from nipype.interfaces import afni -from nipype.interfaces import ants -from nipype.interfaces import fsl -from nipype.interfaces import freesurfer -import nipype.interfaces.utility as util + +from nipype.interfaces import afni, ants, freesurfer, fsl from nipype.interfaces.fsl import utils as fsl_utils -from CPAC.pipeline import nipype_pipeline_engine as pe +import nipype.interfaces.utility as util + from CPAC.anat_preproc.ants import init_brain_extraction_wf -from CPAC.anat_preproc.utils import create_3dskullstrip_arg_string, \ - freesurfer_hemispheres, \ - fsl_aff_to_rigid, \ - mri_convert, \ - wb_command, \ - fslmaths_command, \ - VolumeRemoveIslands, \ - normalize_wmparc, \ - pad +from CPAC.anat_preproc.utils import ( + create_3dskullstrip_arg_string, + freesurfer_hemispheres, + fsl_aff_to_rigid, + fslmaths_command, + mri_convert, + normalize_wmparc, + pad, + VolumeRemoveIslands, + wb_command, +) +from CPAC.pipeline import nipype_pipeline_engine as pe +from CPAC.pipeline.nodeblock import nodeblock from CPAC.utils.interfaces.fsl import Merge as fslMerge -def acpc_alignment(config=None, acpc_target='whole-head', mask=False, - wf_name='acpc_align'): +def acpc_alignment( + config=None, acpc_target="whole-head", mask=False, wf_name="acpc_align" +): preproc = pe.Workflow(name=wf_name) - inputnode = pe.Node(util.IdentityInterface(fields=['anat_leaf', - 'anat_brain', - 'brain_mask', - 'template_brain_only_for_anat', - 'template_brain_for_acpc', - 'template_head_for_acpc']), - name='inputspec') - - output_node = pe.Node(util.IdentityInterface(fields=['acpc_aligned_head', - 'acpc_brain_mask', - 'from-T1w_to-ACPC_mode-image_desc-aff2rig_xfm', - 'acpc_aligned_brain', - 'acpc_brain_mask']), - name='outputspec') - if config.anatomical_preproc['acpc_alignment']['FOV_crop'] == 'robustfov': - robust_fov = pe.Node(interface=fsl_utils.RobustFOV(), - name='anat_acpc_1_robustfov') - robust_fov.inputs.brainsize = config.anatomical_preproc['acpc_alignment']['brain_size'] - robust_fov.inputs.out_transform = 'fov_xfm.mat' - - fov, in_file = (robust_fov, 'in_file') - fov, fov_mtx = (robust_fov, 'out_transform') - fov, fov_outfile = (robust_fov, 'out_roi') - - elif config.anatomical_preproc['acpc_alignment']['FOV_crop'] == 'flirt': + inputnode = pe.Node( + util.IdentityInterface( + fields=[ + "anat_leaf", + "anat_brain", + 
"brain_mask", + "template_brain_only_for_anat", + "template_brain_for_acpc", + "template_head_for_acpc", + ] + ), + name="inputspec", + ) + + output_node = pe.Node( + util.IdentityInterface( + fields=[ + "acpc_aligned_head", + "acpc_brain_mask", + "from-T1w_to-ACPC_mode-image_desc-aff2rig_xfm", + "acpc_aligned_brain", + "acpc_brain_mask", + ] + ), + name="outputspec", + ) + if config.anatomical_preproc["acpc_alignment"]["FOV_crop"] == "robustfov": + robust_fov = pe.Node( + interface=fsl_utils.RobustFOV(), name="anat_acpc_1_robustfov" + ) + robust_fov.inputs.brainsize = config.anatomical_preproc["acpc_alignment"][ + "brain_size" + ] + robust_fov.inputs.out_transform = "fov_xfm.mat" + + fov, in_file = (robust_fov, "in_file") + fov, fov_mtx = (robust_fov, "out_transform") + fov, fov_outfile = (robust_fov, "out_roi") + + elif config.anatomical_preproc["acpc_alignment"]["FOV_crop"] == "flirt": # robustfov doesn't work on some monkey data. prefer using flirt. # ${FSLDIR}/bin/flirt -in "${Input}" -applyxfm -ref "${Input}" -omat "$WD"/roi2full.mat -out "$WD"/robustroi.nii.gz # adopted from DCAN NHP https://github.com/DCAN-Labs/dcan-macaque-pipeline/blob/master/PreFreeSurfer/scripts/ACPCAlignment.sh#L80-L81 - flirt_fov = pe.Node(interface=fsl.FLIRT(), - name='anat_acpc_1_fov') - flirt_fov.inputs.args = '-applyxfm' + flirt_fov = pe.Node(interface=fsl.FLIRT(), name="anat_acpc_1_fov") + flirt_fov.inputs.args = "-applyxfm" - fov, in_file = (flirt_fov, 'in_file') - fov, ref_file = (flirt_fov, 'reference') - fov, fov_mtx = (flirt_fov, 'out_matrix_file') - fov, fov_outfile = (flirt_fov, 'out_file') + fov, in_file = (flirt_fov, "in_file") + fov, ref_file = (flirt_fov, "reference") + fov, fov_mtx = (flirt_fov, "out_matrix_file") + fov, fov_outfile = (flirt_fov, "out_file") # align head-to-head to get acpc.mat (for human) - if acpc_target == 'whole-head': - preproc.connect(inputnode, 'anat_leaf', fov, in_file) - if config.anatomical_preproc['acpc_alignment']['FOV_crop'] == 'flirt': - preproc.connect(inputnode, 'anat_leaf', fov, ref_file) + if acpc_target == "whole-head": + preproc.connect(inputnode, "anat_leaf", fov, in_file) + if config.anatomical_preproc["acpc_alignment"]["FOV_crop"] == "flirt": + preproc.connect(inputnode, "anat_leaf", fov, ref_file) # align brain-to-brain to get acpc.mat (for monkey) - if acpc_target == 'brain': - preproc.connect(inputnode, 'anat_brain', fov, in_file) - if config.anatomical_preproc['acpc_alignment']['FOV_crop'] == 'flirt': - preproc.connect(inputnode, 'anat_brain', fov, ref_file) + if acpc_target == "brain": + preproc.connect(inputnode, "anat_brain", fov, in_file) + if config.anatomical_preproc["acpc_alignment"]["FOV_crop"] == "flirt": + preproc.connect(inputnode, "anat_brain", fov, ref_file) - convert_fov_xfm = pe.Node(interface=fsl_utils.ConvertXFM(), - name='anat_acpc_2_fov_convertxfm') + convert_fov_xfm = pe.Node( + interface=fsl_utils.ConvertXFM(), name="anat_acpc_2_fov_convertxfm" + ) convert_fov_xfm.inputs.invert_xfm = True - preproc.connect(fov, fov_mtx, - convert_fov_xfm, 'in_file') + preproc.connect(fov, fov_mtx, convert_fov_xfm, "in_file") - align = pe.Node(interface=fsl.FLIRT(), - name='anat_acpc_3_flirt') - align.inputs.interp = 'spline' + align = pe.Node(interface=fsl.FLIRT(), name="anat_acpc_3_flirt") + align.inputs.interp = "spline" align.inputs.searchr_x = [30, 30] align.inputs.searchr_y = [30, 30] align.inputs.searchr_z = [30, 30] - preproc.connect(fov, fov_outfile, align, 'in_file') + preproc.connect(fov, fov_outfile, align, "in_file") # align 
head-to-head to get acpc.mat (for human) - if acpc_target == 'whole-head': - preproc.connect(inputnode, 'template_head_for_acpc', align, - 'reference') + if acpc_target == "whole-head": + preproc.connect(inputnode, "template_head_for_acpc", align, "reference") # align brain-to-brain to get acpc.mat (for monkey) - if acpc_target == 'brain': - preproc.connect(inputnode, 'template_brain_for_acpc', align, - 'reference') + if acpc_target == "brain": + preproc.connect(inputnode, "template_brain_for_acpc", align, "reference") - concat_xfm = pe.Node(interface=fsl_utils.ConvertXFM(), - name='anat_acpc_4_concatxfm') + concat_xfm = pe.Node(interface=fsl_utils.ConvertXFM(), name="anat_acpc_4_concatxfm") concat_xfm.inputs.concat_xfm = True - preproc.connect(convert_fov_xfm, 'out_file', concat_xfm, 'in_file') - preproc.connect(align, 'out_matrix_file', concat_xfm, 'in_file2') + preproc.connect(convert_fov_xfm, "out_file", concat_xfm, "in_file") + preproc.connect(align, "out_matrix_file", concat_xfm, "in_file2") - aff_to_rig_imports = ['import os', 'from numpy import *'] - aff_to_rig = pe.Node(util.Function(input_names=['in_xfm', 'out_name'], - output_names=['out_mat'], - function=fsl_aff_to_rigid, - imports=aff_to_rig_imports), - name='anat_acpc_5_aff2rigid') - aff_to_rig.inputs.out_name = 'acpc.mat' + aff_to_rig_imports = ["import os", "from numpy import *"] + aff_to_rig = pe.Node( + util.Function( + input_names=["in_xfm", "out_name"], + output_names=["out_mat"], + function=fsl_aff_to_rigid, + imports=aff_to_rig_imports, + ), + name="anat_acpc_5_aff2rigid", + ) + aff_to_rig.inputs.out_name = "acpc.mat" - preproc.connect(concat_xfm, 'out_file', aff_to_rig, 'in_xfm') - preproc.connect(aff_to_rig, 'out_mat', output_node, 'from-T1w_to-ACPC_mode-image_desc-aff2rig_xfm') + preproc.connect(concat_xfm, "out_file", aff_to_rig, "in_xfm") + preproc.connect( + aff_to_rig, + "out_mat", + output_node, + "from-T1w_to-ACPC_mode-image_desc-aff2rig_xfm", + ) - apply_xfm = pe.Node(interface=fsl.ApplyWarp(), - name='anat_acpc_6_applywarp') - apply_xfm.inputs.interp = 'spline' + apply_xfm = pe.Node(interface=fsl.ApplyWarp(), name="anat_acpc_6_applywarp") + apply_xfm.inputs.interp = "spline" apply_xfm.inputs.relwarp = True - preproc.connect(inputnode, 'anat_leaf', apply_xfm, 'in_file') - preproc.connect(inputnode, 'template_head_for_acpc', apply_xfm, - 'ref_file') - preproc.connect(aff_to_rig, 'out_mat', apply_xfm, 'premat') - preproc.connect(apply_xfm, 'out_file', output_node, 'acpc_aligned_head') + preproc.connect(inputnode, "anat_leaf", apply_xfm, "in_file") + preproc.connect(inputnode, "template_head_for_acpc", apply_xfm, "ref_file") + preproc.connect(aff_to_rig, "out_mat", apply_xfm, "premat") + preproc.connect(apply_xfm, "out_file", output_node, "acpc_aligned_head") - if acpc_target == 'brain': - apply_xfm_brain = pe.Node(interface=fsl.ApplyWarp(), - name='anat_acpc_brain_6_applywarp') - apply_xfm_brain.inputs.interp = 'spline' + if acpc_target == "brain": + apply_xfm_brain = pe.Node( + interface=fsl.ApplyWarp(), name="anat_acpc_brain_6_applywarp" + ) + apply_xfm_brain.inputs.interp = "spline" apply_xfm_brain.inputs.relwarp = True - preproc.connect(inputnode, 'anat_brain', apply_xfm_brain, 'in_file') - preproc.connect(inputnode, 'template_brain_for_acpc', apply_xfm_brain, - 'ref_file') - preproc.connect(aff_to_rig, 'out_mat', apply_xfm_brain, 'premat') - preproc.connect(apply_xfm_brain, 'out_file', output_node, 'acpc_aligned_brain') + preproc.connect(inputnode, "anat_brain", apply_xfm_brain, "in_file") + 
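# fsl_aff_to_rigid (wrapped by aff_to_rig above) reduces the 12-dof FLIRT
# affine to a 6-dof rigid transform, so ACPC alignment rotates and
# translates the head without rescaling it. A generic way to project an
# affine onto the nearest rigid transform is a polar decomposition of its
# 3x3 block -- an illustrative sketch, not FSL's aff2rigid algorithm itself:
import numpy as np

def closest_rigid(affine):
    """Return the nearest rotation + translation to a 4x4 affine."""
    rigid = np.eye(4)
    u, _, vt = np.linalg.svd(affine[:3, :3])
    rotation = u @ vt
    if np.linalg.det(rotation) < 0:  # flip one axis to avoid a reflection
        u[:, -1] *= -1
        rotation = u @ vt
    rigid[:3, :3] = rotation
    rigid[:3, 3] = affine[:3, 3]  # keep the translation unchanged
    return rigid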
preproc.connect( + inputnode, "template_brain_for_acpc", apply_xfm_brain, "ref_file" + ) + preproc.connect(aff_to_rig, "out_mat", apply_xfm_brain, "premat") + preproc.connect(apply_xfm_brain, "out_file", output_node, "acpc_aligned_brain") if mask: - apply_xfm_mask = pe.Node(interface=fsl.ApplyWarp(), - name='anat_mask_acpc_7_applywarp') - apply_xfm_mask.inputs.interp = 'nn' + apply_xfm_mask = pe.Node( + interface=fsl.ApplyWarp(), name="anat_mask_acpc_7_applywarp" + ) + apply_xfm_mask.inputs.interp = "nn" apply_xfm_mask.inputs.relwarp = True - preproc.connect(inputnode, 'brain_mask', apply_xfm_mask, 'in_file') - preproc.connect(inputnode, 'template_brain_for_acpc', apply_xfm_mask, - 'ref_file') - preproc.connect(aff_to_rig, 'out_mat', apply_xfm_mask, 'premat') - preproc.connect(apply_xfm_mask, 'out_file', output_node, - 'acpc_brain_mask') + preproc.connect(inputnode, "brain_mask", apply_xfm_mask, "in_file") + preproc.connect( + inputnode, "template_brain_for_acpc", apply_xfm_mask, "ref_file" + ) + preproc.connect(aff_to_rig, "out_mat", apply_xfm_mask, "premat") + preproc.connect(apply_xfm_mask, "out_file", output_node, "acpc_brain_mask") return preproc -def T2wToT1wReg(wf_name='T2w_to_T1w_reg'): - +def T2wToT1wReg(wf_name="T2w_to_T1w_reg"): # Adapted from DCAN lab # https://github.com/DCAN-Labs/dcan-macaque-pipeline/blob/master/PreFreeSurfer/scripts/T2wToT1wReg.sh preproc = pe.Workflow(name=wf_name) - inputnode = pe.Node(util.IdentityInterface(fields=['T1w', - 'T1w_brain', - 'T2w', - 'T2w_brain']), - name='inputspec') + inputnode = pe.Node( + util.IdentityInterface(fields=["T1w", "T1w_brain", "T2w", "T2w_brain"]), + name="inputspec", + ) - outputnode = pe.Node(util.IdentityInterface(fields=['T2w_to_T1w']), - name='outputspec') + outputnode = pe.Node( + util.IdentityInterface(fields=["T2w_to_T1w"]), name="outputspec" + ) # ${FSLDIR}/bin/epi_reg --epi="$T2wImageBrain" --t1="$T1wImage" --t1brain="$WD"/"$T1wImageBrainFile" --out="$WD"/T2w2T1w - T2w2T1w_reg = pe.Node(interface=fsl.EpiReg(), - name='T2w2T1w_reg') - T2w2T1w_reg.inputs.out_base = 'T2w2T1w' + T2w2T1w_reg = pe.Node(interface=fsl.EpiReg(), name="T2w2T1w_reg") + T2w2T1w_reg.inputs.out_base = "T2w2T1w" - preproc.connect(inputnode, 'T2w_brain', T2w2T1w_reg ,'epi') - preproc.connect(inputnode, 'T1w', T2w2T1w_reg ,'t1_head') - preproc.connect(inputnode, 'T1w_brain', T2w2T1w_reg ,'t1_brain') + preproc.connect(inputnode, "T2w_brain", T2w2T1w_reg, "epi") + preproc.connect(inputnode, "T1w", T2w2T1w_reg, "t1_head") + preproc.connect(inputnode, "T1w_brain", T2w2T1w_reg, "t1_brain") # ${FSLDIR}/bin/applywarp --rel --interp=spline --in="$T2wImage" --ref="$T1wImage" --premat="$WD"/T2w2T1w.mat --out="$WD"/T2w2T1w - T2w2T1w = pe.Node(interface=fsl.ApplyWarp(), - name='T2w2T1w_applywarp') - T2w2T1w.inputs.interp = 'spline' + T2w2T1w = pe.Node(interface=fsl.ApplyWarp(), name="T2w2T1w_applywarp") + T2w2T1w.inputs.interp = "spline" T2w2T1w.inputs.relwarp = True - preproc.connect(inputnode, 'T2w', T2w2T1w, 'in_file') - preproc.connect(inputnode, 'T1w', T2w2T1w, 'ref_file') - preproc.connect(T2w2T1w_reg, 'epi2str_mat', T2w2T1w, 'premat') + preproc.connect(inputnode, "T2w", T2w2T1w, "in_file") + preproc.connect(inputnode, "T1w", T2w2T1w, "ref_file") + preproc.connect(T2w2T1w_reg, "epi2str_mat", T2w2T1w, "premat") # ${FSLDIR}/bin/fslmaths "$WD"/T2w2T1w -add 1 "$WD"/T2w2T1w -odt float - T2w2T1w_final = pe.Node(interface=fsl.ImageMaths(), - name='T2w2T1w_final') - T2w2T1w_final.inputs.op_string = "-add 1" + T2w2T1w_final = pe.Node(interface=fsl.ImageMaths(), 
name="T2w2T1w_final") + T2w2T1w_final.inputs.op_string = "-add 1" - preproc.connect(T2w2T1w, 'out_file', T2w2T1w_final, 'in_file') - preproc.connect(T2w2T1w_final, 'out_file', outputnode, 'T2w_to_T1w') + preproc.connect(T2w2T1w, "out_file", T2w2T1w_final, "in_file") + preproc.connect(T2w2T1w_final, "out_file", outputnode, "T2w_to_T1w") return preproc -def BiasFieldCorrection_sqrtT1wXT1w(config=None, wf_name='biasfield_correction_t1t2'): - +def BiasFieldCorrection_sqrtT1wXT1w(config=None, wf_name="biasfield_correction_t1t2"): # Adapted from DCAN lab # https://github.com/DCAN-Labs/dcan-macaque-pipeline/blob/master/PreFreeSurfer/scripts/BiasFieldCorrection_sqrtT1wXT1w.sh preproc = pe.Workflow(name=wf_name) - inputnode = pe.Node(util.IdentityInterface(fields=['T1w', - 'T1w_brain', - 'T2w']), - name='inputspec') + inputnode = pe.Node( + util.IdentityInterface(fields=["T1w", "T1w_brain", "T2w"]), name="inputspec" + ) - outputnode = pe.Node(util.IdentityInterface(fields=['T1w_biascorrected', - 'T1w_brain_biascorrected', - 'T2w_biascorrected', - 'T2w_brain_biascorrected', - 'biasfield']), - name='outputspec') + outputnode = pe.Node( + util.IdentityInterface( + fields=[ + "T1w_biascorrected", + "T1w_brain_biascorrected", + "T2w_biascorrected", + "T2w_brain_biascorrected", + "biasfield", + ] + ), + name="outputspec", + ) # 1. Form sqrt(T1w*T2w), mask this and normalise by the mean # ${FSLDIR}/bin/fslmaths $T1wImage -mul $T2wImage -abs -sqrt $WD/T1wmulT2w.nii.gz -odt float - T1wmulT2w = pe.Node(interface=fsl.MultiImageMaths(), - name='T1wmulT2w') + T1wmulT2w = pe.Node(interface=fsl.MultiImageMaths(), name="T1wmulT2w") T1wmulT2w.inputs.op_string = "-mul %s -abs -sqrt" - - preproc.connect(inputnode, 'T1w', T1wmulT2w, 'in_file') - preproc.connect(inputnode, 'T2w', T1wmulT2w, 'operand_files') + + preproc.connect(inputnode, "T1w", T1wmulT2w, "in_file") + preproc.connect(inputnode, "T2w", T1wmulT2w, "operand_files") # ${FSLDIR}/bin/fslmaths $WD/T1wmulT2w.nii.gz -mas $T1wImageBrain $WD/T1wmulT2w_brain.nii.gz - T1wmulT2w_brain = pe.Node(interface=fsl.MultiImageMaths(), - name='T1wmulT2w_brain') + T1wmulT2w_brain = pe.Node(interface=fsl.MultiImageMaths(), name="T1wmulT2w_brain") T1wmulT2w_brain.inputs.op_string = "-mas %s " - preproc.connect(T1wmulT2w, 'out_file', T1wmulT2w_brain, 'in_file') - preproc.connect(inputnode, 'T1w_brain', T1wmulT2w_brain, 'operand_files') + preproc.connect(T1wmulT2w, "out_file", T1wmulT2w_brain, "in_file") + preproc.connect(inputnode, "T1w_brain", T1wmulT2w_brain, "operand_files") # meanbrainval=`${FSLDIR}/bin/fslstats $WD/T1wmulT2w_brain.nii.gz -M` - meanbrainval = pe.Node(interface=fsl.ImageStats(), - name='image_stats', - iterfield=['in_file']) - meanbrainval.inputs.op_string = '-M' + meanbrainval = pe.Node( + interface=fsl.ImageStats(), name="image_stats", iterfield=["in_file"] + ) + meanbrainval.inputs.op_string = "-M" - preproc.connect(T1wmulT2w_brain, 'out_file', meanbrainval, 'in_file') + preproc.connect(T1wmulT2w_brain, "out_file", meanbrainval, "in_file") # ${FSLDIR}/bin/fslmaths $WD/T1wmulT2w_brain.nii.gz -div $meanbrainval $WD/T1wmulT2w_brain_norm.nii.gz - T1wmulT2w_brain_norm = pe.Node(interface=fsl.ImageMaths(), - name='T1wmulT2w_brain_norm') - - def form_meanbrainval_string(meanbrainval): - return '-div %f' % (meanbrainval) + T1wmulT2w_brain_norm = pe.Node( + interface=fsl.ImageMaths(), name="T1wmulT2w_brain_norm" + ) - preproc.connect(T1wmulT2w_brain, 'out_file', T1wmulT2w_brain_norm, 'in_file') - preproc.connect(meanbrainval, ('out_stat', form_meanbrainval_string), 
- T1wmulT2w_brain_norm, 'op_string') + def form_meanbrainval_string(meanbrainval): + return "-div %f" % (meanbrainval) + + preproc.connect(T1wmulT2w_brain, "out_file", T1wmulT2w_brain_norm, "in_file") + preproc.connect( + meanbrainval, + ("out_stat", form_meanbrainval_string), + T1wmulT2w_brain_norm, + "op_string", + ) # 2. Smooth the normalised sqrt image, using within-mask smoothing : s(Mask*X)/s(Mask) # ${FSLDIR}/bin/fslmaths $WD/T1wmulT2w_brain_norm.nii.gz -bin -s $BiasFieldSmoothingSigma $WD/SmoothNorm_s${BiasFieldSmoothingSigma}.nii.gz - SmoothNorm = pe.Node(interface=fsl.ImageMaths(), - name='SmoothNorm') - SmoothNorm.inputs.op_string = "-bin -s %f" % (config.anatomical_preproc['t1t2_bias_field_correction']['BiasFieldSmoothingSigma']) + SmoothNorm = pe.Node(interface=fsl.ImageMaths(), name="SmoothNorm") + SmoothNorm.inputs.op_string = ( + "-bin -s %f" + % ( + config.anatomical_preproc["t1t2_bias_field_correction"][ + "BiasFieldSmoothingSigma" + ] + ) + ) - preproc.connect(T1wmulT2w_brain_norm, 'out_file', SmoothNorm, 'in_file') + preproc.connect(T1wmulT2w_brain_norm, "out_file", SmoothNorm, "in_file") # ${FSLDIR}/bin/fslmaths $WD/T1wmulT2w_brain_norm.nii.gz -s $BiasFieldSmoothingSigma -div $WD/SmoothNorm_s${BiasFieldSmoothingSigma}.nii.gz $WD/T1wmulT2w_brain_norm_s${BiasFieldSmoothingSigma}.nii.gz def T1wmulT2w_brain_norm_s_string(sigma, in_file): - return "-s %f -div %s" %(sigma, in_file) - - T1wmulT2w_brain_norm_s_string = pe.Node(util.Function(input_names=['sigma', 'in_file'], - output_names=['out_str'], - function=T1wmulT2w_brain_norm_s_string), - name='T1wmulT2w_brain_norm_s_string') - T1wmulT2w_brain_norm_s_string.inputs.sigma = config.anatomical_preproc['t1t2_bias_field_correction']['BiasFieldSmoothingSigma'] - - preproc.connect(SmoothNorm, 'out_file', T1wmulT2w_brain_norm_s_string, 'in_file') - - T1wmulT2w_brain_norm_s = pe.Node(interface=fsl.ImageMaths(), - name='T1wmulT2w_brain_norm_s') - - preproc.connect(T1wmulT2w_brain_norm, 'out_file', T1wmulT2w_brain_norm_s, 'in_file') - preproc.connect(T1wmulT2w_brain_norm_s_string, 'out_str', T1wmulT2w_brain_norm_s, 'op_string') + return "-s %f -div %s" % (sigma, in_file) + + T1wmulT2w_brain_norm_s_string = pe.Node( + util.Function( + input_names=["sigma", "in_file"], + output_names=["out_str"], + function=T1wmulT2w_brain_norm_s_string, + ), + name="T1wmulT2w_brain_norm_s_string", + ) + T1wmulT2w_brain_norm_s_string.inputs.sigma = config.anatomical_preproc[ + "t1t2_bias_field_correction" + ]["BiasFieldSmoothingSigma"] + + preproc.connect(SmoothNorm, "out_file", T1wmulT2w_brain_norm_s_string, "in_file") + + T1wmulT2w_brain_norm_s = pe.Node( + interface=fsl.ImageMaths(), name="T1wmulT2w_brain_norm_s" + ) + + preproc.connect(T1wmulT2w_brain_norm, "out_file", T1wmulT2w_brain_norm_s, "in_file") + preproc.connect( + T1wmulT2w_brain_norm_s_string, "out_str", T1wmulT2w_brain_norm_s, "op_string" + ) # 3. 
Divide normalised sqrt image by smoothed version (to do simple bias correction) # ${FSLDIR}/bin/fslmaths $WD/T1wmulT2w_brain_norm.nii.gz -div $WD/T1wmulT2w_brain_norm_s$BiasFieldSmoothingSigma.nii.gz $WD/T1wmulT2w_brain_norm_modulate.nii.gz - T1wmulT2w_brain_norm_modulate = pe.Node(interface=fsl.MultiImageMaths(), - name='T1wmulT2w_brain_norm_modulate') - T1wmulT2w_brain_norm_modulate.inputs.op_string = "-div %s" + T1wmulT2w_brain_norm_modulate = pe.Node( + interface=fsl.MultiImageMaths(), name="T1wmulT2w_brain_norm_modulate" + ) + T1wmulT2w_brain_norm_modulate.inputs.op_string = "-div %s" - preproc.connect(T1wmulT2w_brain_norm, 'out_file', T1wmulT2w_brain_norm_modulate, 'in_file') - preproc.connect(T1wmulT2w_brain_norm_s, 'out_file', T1wmulT2w_brain_norm_modulate, 'operand_files') + preproc.connect( + T1wmulT2w_brain_norm, "out_file", T1wmulT2w_brain_norm_modulate, "in_file" + ) + preproc.connect( + T1wmulT2w_brain_norm_s, + "out_file", + T1wmulT2w_brain_norm_modulate, + "operand_files", + ) # 4. Create a mask using a threshold at Mean - 0.5*Stddev, with filling of holes to remove any non-grey/white tissue. # STD=`${FSLDIR}/bin/fslstats $WD/T1wmulT2w_brain_norm_modulate.nii.gz -S` - STD = pe.Node(interface=fsl.ImageStats(), - name='STD', - iterfield=['in_file']) - STD.inputs.op_string = '-S' + STD = pe.Node(interface=fsl.ImageStats(), name="STD", iterfield=["in_file"]) + STD.inputs.op_string = "-S" - preproc.connect(T1wmulT2w_brain_norm_modulate, 'out_file', STD, 'in_file') + preproc.connect(T1wmulT2w_brain_norm_modulate, "out_file", STD, "in_file") # MEAN=`${FSLDIR}/bin/fslstats $WD/T1wmulT2w_brain_norm_modulate.nii.gz -M` - MEAN = pe.Node(interface=fsl.ImageStats(), - name='MEAN', - iterfield=['in_file']) - MEAN.inputs.op_string = '-M' + MEAN = pe.Node(interface=fsl.ImageStats(), name="MEAN", iterfield=["in_file"]) + MEAN.inputs.op_string = "-M" + + preproc.connect(T1wmulT2w_brain_norm_modulate, "out_file", MEAN, "in_file") - preproc.connect(T1wmulT2w_brain_norm_modulate, 'out_file', MEAN, 'in_file') - # Lower=`echo "$MEAN - ($STD * $Factor)" | bc -l` def form_lower_string(mean, std): - Factor = 0.5 #Leave this at 0.5 for now it is the number of standard deviations below the mean to threshold the non-brain tissues at - lower = str(float(mean)-(float(std)*float(Factor))) - return '-thr %s -bin -ero -mul 255' % (lower) - - form_lower_string = pe.Node(util.Function(input_names=['mean', 'std'], - output_names=['out_str'], - function=form_lower_string), - name='form_lower_string') + Factor = 0.5 # Leave this at 0.5 for now it is the number of standard deviations below the mean to threshold the non-brain tissues at + lower = str(float(mean) - (float(std) * float(Factor))) + return "-thr %s -bin -ero -mul 255" % (lower) + + form_lower_string = pe.Node( + util.Function( + input_names=["mean", "std"], + output_names=["out_str"], + function=form_lower_string, + ), + name="form_lower_string", + ) - preproc.connect(MEAN, 'out_stat', form_lower_string, 'mean') - preproc.connect(STD, 'out_stat', form_lower_string, 'std') + preproc.connect(MEAN, "out_stat", form_lower_string, "mean") + preproc.connect(STD, "out_stat", form_lower_string, "std") # ${FSLDIR}/bin/fslmaths $WD/T1wmulT2w_brain_norm_modulate -thr $Lower -bin -ero -mul 255 $WD/T1wmulT2w_brain_norm_modulate_mask - T1wmulT2w_brain_norm_modulate_mask = pe.Node(interface=fsl.ImageMaths(), - name='T1wmulT2w_brain_norm_modulate_mask') + T1wmulT2w_brain_norm_modulate_mask = pe.Node( + interface=fsl.ImageMaths(), 
name="T1wmulT2w_brain_norm_modulate_mask" + ) - preproc.connect(T1wmulT2w_brain_norm_modulate, 'out_file', T1wmulT2w_brain_norm_modulate_mask, 'in_file') - preproc.connect(form_lower_string, 'out_str', T1wmulT2w_brain_norm_modulate_mask, 'op_string') + preproc.connect( + T1wmulT2w_brain_norm_modulate, + "out_file", + T1wmulT2w_brain_norm_modulate_mask, + "in_file", + ) + preproc.connect( + form_lower_string, "out_str", T1wmulT2w_brain_norm_modulate_mask, "op_string" + ) # ${CARET7DIR}/wb_command -volume-remove-islands $WD/T1wmulT2w_brain_norm_modulate_mask.nii.gz $WD/T1wmulT2w_brain_norm_modulate_mask.nii.gz - T1wmulT2w_brain_norm_modulate_mask_roi = pe.Node(interface=VolumeRemoveIslands(), - name='remove_islands') + T1wmulT2w_brain_norm_modulate_mask_roi = pe.Node( + interface=VolumeRemoveIslands(), name="remove_islands" + ) - preproc.connect(T1wmulT2w_brain_norm_modulate_mask, 'out_file', T1wmulT2w_brain_norm_modulate_mask_roi, 'in_file') + preproc.connect( + T1wmulT2w_brain_norm_modulate_mask, + "out_file", + T1wmulT2w_brain_norm_modulate_mask_roi, + "in_file", + ) # 5. Extrapolate normalised sqrt image from mask region out to whole FOV # ${FSLDIR}/bin/fslmaths $WD/T1wmulT2w_brain_norm.nii.gz -mas $WD/T1wmulT2w_brain_norm_modulate_mask.nii.gz -dilall $WD/bias_raw.nii.gz -odt float - bias_raw = pe.Node(interface=fsl.MultiImageMaths(), - name='bias_raw') + bias_raw = pe.Node(interface=fsl.MultiImageMaths(), name="bias_raw") bias_raw.inputs.op_string = "-mas %s -dilall " - preproc.connect(T1wmulT2w_brain_norm, 'out_file', bias_raw, 'in_file') - preproc.connect(T1wmulT2w_brain_norm_modulate_mask_roi, 'out_file', bias_raw, 'operand_files') + preproc.connect(T1wmulT2w_brain_norm, "out_file", bias_raw, "in_file") + preproc.connect( + T1wmulT2w_brain_norm_modulate_mask_roi, "out_file", bias_raw, "operand_files" + ) # ${FSLDIR}/bin/fslmaths $WD/bias_raw.nii.gz -s $BiasFieldSmoothingSigma $OutputBiasField - OutputBiasField = pe.Node(interface=fsl.ImageMaths(), - name='OutputBiasField') - OutputBiasField.inputs.op_string = "-s %f " % (config.anatomical_preproc['t1t2_bias_field_correction']['BiasFieldSmoothingSigma']) + OutputBiasField = pe.Node(interface=fsl.ImageMaths(), name="OutputBiasField") + OutputBiasField.inputs.op_string = ( + "-s %f " + % ( + config.anatomical_preproc["t1t2_bias_field_correction"][ + "BiasFieldSmoothingSigma" + ] + ) + ) - preproc.connect(bias_raw, 'out_file', OutputBiasField, 'in_file') + preproc.connect(bias_raw, "out_file", OutputBiasField, "in_file") # 6. 
Use bias field output to create corrected images def file_to_a_list(infile_1, infile_2): - return list([infile_1,infile_2]) - - file_to_a_list = pe.Node(util.Function(input_names=['infile_1', 'infile_2'], - output_names=['out_list'], - function=file_to_a_list), - name='file_to_a_list') + return list([infile_1, infile_2]) + + file_to_a_list = pe.Node( + util.Function( + input_names=["infile_1", "infile_2"], + output_names=["out_list"], + function=file_to_a_list, + ), + name="file_to_a_list", + ) - preproc.connect(OutputBiasField, 'out_file', file_to_a_list, 'infile_1') - preproc.connect(inputnode, 'T1w_brain', file_to_a_list, 'infile_2') + preproc.connect(OutputBiasField, "out_file", file_to_a_list, "infile_1") + preproc.connect(inputnode, "T1w_brain", file_to_a_list, "infile_2") # ${FSLDIR}/bin/fslmaths $T1wImage -div $OutputBiasField -mas $T1wImageBrain $OutputT1wRestoredBrainImage -odt float - OutputT1wRestoredBrainImage = pe.Node(interface=fsl.MultiImageMaths(), - name='OutputT1wRestoredBrainImage') - OutputT1wRestoredBrainImage.inputs.op_string = "-div %s -mas %s " + OutputT1wRestoredBrainImage = pe.Node( + interface=fsl.MultiImageMaths(), name="OutputT1wRestoredBrainImage" + ) + OutputT1wRestoredBrainImage.inputs.op_string = "-div %s -mas %s " + + preproc.connect(inputnode, "T1w", OutputT1wRestoredBrainImage, "in_file") + preproc.connect( + file_to_a_list, "out_list", OutputT1wRestoredBrainImage, "operand_files" + ) - preproc.connect(inputnode, 'T1w', OutputT1wRestoredBrainImage, 'in_file') - preproc.connect(file_to_a_list,'out_list',OutputT1wRestoredBrainImage, 'operand_files') - # ${FSLDIR}/bin/fslmaths $T1wImage -div $OutputBiasField $OutputT1wRestoredImage -odt float - OutputT1wRestoredImage = pe.Node(interface=fsl.MultiImageMaths(), - name='OutputT1wRestoredImage') + OutputT1wRestoredImage = pe.Node( + interface=fsl.MultiImageMaths(), name="OutputT1wRestoredImage" + ) OutputT1wRestoredImage.inputs.op_string = "-div %s " - preproc.connect(inputnode, 'T1w', OutputT1wRestoredImage, 'in_file') - preproc.connect(OutputBiasField, 'out_file', OutputT1wRestoredImage, 'operand_files') + preproc.connect(inputnode, "T1w", OutputT1wRestoredImage, "in_file") + preproc.connect( + OutputBiasField, "out_file", OutputT1wRestoredImage, "operand_files" + ) # ${FSLDIR}/bin/fslmaths $T2wImage -div $OutputBiasField -mas $T1wImageBrain $OutputT2wRestoredBrainImage -odt float - OutputT2wRestoredBrainImage = pe.Node(interface=fsl.MultiImageMaths(), - name='OutputT2wRestoredBrainImage') - OutputT2wRestoredBrainImage.inputs.op_string = "-div %s -mas %s " - - preproc.connect(inputnode, 'T2w', OutputT2wRestoredBrainImage, 'in_file') - preproc.connect(file_to_a_list,'out_list',OutputT2wRestoredBrainImage, 'operand_files') + OutputT2wRestoredBrainImage = pe.Node( + interface=fsl.MultiImageMaths(), name="OutputT2wRestoredBrainImage" + ) + OutputT2wRestoredBrainImage.inputs.op_string = "-div %s -mas %s " + + preproc.connect(inputnode, "T2w", OutputT2wRestoredBrainImage, "in_file") + preproc.connect( + file_to_a_list, "out_list", OutputT2wRestoredBrainImage, "operand_files" + ) # ${FSLDIR}/bin/fslmaths $T2wImage -div $OutputBiasField $OutputT2wRestoredImage -odt float - OutputT2wRestoredImage = pe.Node(interface=fsl.MultiImageMaths(), - name='OutputT2wRestoredImage') + OutputT2wRestoredImage = pe.Node( + interface=fsl.MultiImageMaths(), name="OutputT2wRestoredImage" + ) OutputT2wRestoredImage.inputs.op_string = "-div %s " - preproc.connect(inputnode, 'T2w', OutputT2wRestoredImage, 'in_file') - 
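# Steps 1-6 above implement the HCP-style sqrt(T1w*T2w) bias-field estimate
# with chained fslmaths calls. The core arithmetic, condensed into
# numpy/scipy -- an illustrative sketch that hard-codes the smoothing sigma
# and skips the threshold/island-removal/dilation refinements of steps 3-5
# (the pipeline reads BiasFieldSmoothingSigma from the config):
import numpy as np
from scipy.ndimage import gaussian_filter

def estimate_bias_field(t1w, t2w, brain_mask, sigma=5.0):
    """Approximate the smooth intensity bias shared by T1w and T2w."""
    mask = brain_mask > 0
    norm = np.sqrt(np.abs(t1w * t2w))  # step 1: sqrt(T1w * T2w)
    norm = np.where(mask, norm / norm[mask].mean(), 0.0)  # normalise by mean
    smoothed = gaussian_filter(norm, sigma)  # step 2: within-mask smoothing,
    support = gaussian_filter(mask.astype(float), sigma)  # s(mask*x) / s(mask)
    return np.where(support > 1e-6, smoothed / np.maximum(support, 1e-6), 0.0)

def bias_correct(image, bias):
    """Divide out the estimated bias field (step 6, simplified)."""
    return np.where(bias > 0, image / bias, image)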
preproc.connect(OutputBiasField, 'out_file', OutputT2wRestoredImage, 'operand_files') + preproc.connect(inputnode, "T2w", OutputT2wRestoredImage, "in_file") + preproc.connect( + OutputBiasField, "out_file", OutputT2wRestoredImage, "operand_files" + ) - preproc.connect(OutputT1wRestoredImage, 'out_file', outputnode, 'T1w_biascorrected') - preproc.connect(OutputT1wRestoredBrainImage, 'out_file', outputnode, 'T1w_brain_biascorrected') - preproc.connect(OutputT2wRestoredImage, 'out_file', outputnode, 'T2w_biascorrected') - preproc.connect(OutputT2wRestoredBrainImage, 'out_file', outputnode, 'T2w_brain_biascorrected') - preproc.connect(OutputBiasField, 'out_file', outputnode, 'biasfield') + preproc.connect(OutputT1wRestoredImage, "out_file", outputnode, "T1w_biascorrected") + preproc.connect( + OutputT1wRestoredBrainImage, "out_file", outputnode, "T1w_brain_biascorrected" + ) + preproc.connect(OutputT2wRestoredImage, "out_file", outputnode, "T2w_biascorrected") + preproc.connect( + OutputT2wRestoredBrainImage, "out_file", outputnode, "T2w_brain_biascorrected" + ) + preproc.connect(OutputBiasField, "out_file", outputnode, "biasfield") return preproc @@ -426,284 +515,330 @@ def file_to_a_list(infile_1, infile_2): def afni_brain_connector(wf, cfg, strat_pool, pipe_num, opt): # Skull-stripping using AFNI 3dSkullStrip inputnode_afni = pe.Node( - util.IdentityInterface(fields=['mask_vol', - 'shrink_factor', - 'var_shrink_fac', - 'shrink_fac_bot_lim', - 'avoid_vent', - 'niter', - 'pushout', - 'touchup', - 'fill_hole', - 'NN_smooth', - 'smooth_final', - 'avoid_eyes', - 'use_edge', - 'exp_frac', - 'push_to_edge', - 'use_skull', - 'perc_int', - 'max_inter_iter', - 'blur_fwhm', - 'fac', - 'monkey']), - name=f'AFNI_options_{pipe_num}') - - skullstrip_args = pe.Node(util.Function(input_names=['spat_norm', - 'spat_norm_dxyz', - 'mask_vol', - 'shrink_fac', - 'var_shrink_fac', - 'shrink_fac_bot_lim', - 'avoid_vent', - 'niter', - 'pushout', - 'touchup', - 'fill_hole', - 'NN_smooth', - 'smooth_final', - 'avoid_eyes', - 'use_edge', - 'exp_frac', - 'push_to_edge', - 'use_skull', - 'perc_int', - 'max_inter_iter', - 'blur_fwhm', - 'fac', - 'monkey'], - output_names=['expr'], - function=create_3dskullstrip_arg_string), - name=f'anat_skullstrip_args_{pipe_num}') + util.IdentityInterface( + fields=[ + "mask_vol", + "shrink_factor", + "var_shrink_fac", + "shrink_fac_bot_lim", + "avoid_vent", + "niter", + "pushout", + "touchup", + "fill_hole", + "NN_smooth", + "smooth_final", + "avoid_eyes", + "use_edge", + "exp_frac", + "push_to_edge", + "use_skull", + "perc_int", + "max_inter_iter", + "blur_fwhm", + "fac", + "monkey", + ] + ), + name=f"AFNI_options_{pipe_num}", + ) + + skullstrip_args = pe.Node( + util.Function( + input_names=[ + "spat_norm", + "spat_norm_dxyz", + "mask_vol", + "shrink_fac", + "var_shrink_fac", + "shrink_fac_bot_lim", + "avoid_vent", + "niter", + "pushout", + "touchup", + "fill_hole", + "NN_smooth", + "smooth_final", + "avoid_eyes", + "use_edge", + "exp_frac", + "push_to_edge", + "use_skull", + "perc_int", + "max_inter_iter", + "blur_fwhm", + "fac", + "monkey", + ], + output_names=["expr"], + function=create_3dskullstrip_arg_string, + ), + name=f"anat_skullstrip_args_{pipe_num}", + ) inputnode_afni.inputs.set( - mask_vol=cfg.anatomical_preproc['brain_extraction'][ - 'AFNI-3dSkullStrip']['mask_vol'], - shrink_factor= - cfg.anatomical_preproc['brain_extraction'][ - 'AFNI-3dSkullStrip']['shrink_factor'], - var_shrink_fac= - cfg.anatomical_preproc['brain_extraction'][ - 
'AFNI-3dSkullStrip']['var_shrink_fac'], - shrink_fac_bot_lim= - cfg.anatomical_preproc['brain_extraction'][ - 'AFNI-3dSkullStrip']['shrink_factor_bot_lim'], - avoid_vent= - cfg.anatomical_preproc['brain_extraction'][ - 'AFNI-3dSkullStrip']['avoid_vent'], - niter=cfg.anatomical_preproc['brain_extraction'][ - 'AFNI-3dSkullStrip']['n_iterations'], - pushout=cfg.anatomical_preproc['brain_extraction'][ - 'AFNI-3dSkullStrip']['pushout'], - touchup=cfg.anatomical_preproc['brain_extraction'][ - 'AFNI-3dSkullStrip']['touchup'], - fill_hole=cfg.anatomical_preproc['brain_extraction'][ - 'AFNI-3dSkullStrip']['fill_hole'], - NN_smooth=cfg.anatomical_preproc['brain_extraction'][ - 'AFNI-3dSkullStrip']['NN_smooth'], - smooth_final= - cfg.anatomical_preproc['brain_extraction'][ - 'AFNI-3dSkullStrip']['smooth_final'], - avoid_eyes= - cfg.anatomical_preproc['brain_extraction'][ - 'AFNI-3dSkullStrip']['avoid_eyes'], - use_edge=cfg.anatomical_preproc['brain_extraction'][ - 'AFNI-3dSkullStrip']['use_edge'], - exp_frac=cfg.anatomical_preproc['brain_extraction'][ - 'AFNI-3dSkullStrip']['exp_frac'], - push_to_edge= - cfg.anatomical_preproc['brain_extraction'][ - 'AFNI-3dSkullStrip']['push_to_edge'], - use_skull=cfg.anatomical_preproc['brain_extraction'][ - 'AFNI-3dSkullStrip']['use_skull'], - perc_int=cfg.anatomical_preproc['brain_extraction'][ - 'AFNI-3dSkullStrip']['perc_int'], - max_inter_iter= - cfg.anatomical_preproc['brain_extraction'][ - 'AFNI-3dSkullStrip']['max_inter_iter'], - fac=cfg.anatomical_preproc['brain_extraction'][ - 'AFNI-3dSkullStrip']['fac'], - blur_fwhm=cfg.anatomical_preproc['brain_extraction'][ - 'AFNI-3dSkullStrip']['blur_fwhm'], - monkey=cfg.anatomical_preproc['brain_extraction'][ - 'AFNI-3dSkullStrip']['monkey'], - ) - - wf.connect([ - (inputnode_afni, skullstrip_args, [ - ('mask_vol', 'mask_vol'), - ('shrink_factor', 'shrink_fac'), - ('var_shrink_fac', 'var_shrink_fac'), - ('shrink_fac_bot_lim', 'shrink_fac_bot_lim'), - ('avoid_vent', 'avoid_vent'), - ('niter', 'niter'), - ('pushout', 'pushout'), - ('touchup', 'touchup'), - ('fill_hole', 'fill_hole'), - ('avoid_eyes', 'avoid_eyes'), - ('use_edge', 'use_edge'), - ('exp_frac', 'exp_frac'), - ('NN_smooth', 'NN_smooth'), - ('smooth_final', 'smooth_final'), - ('push_to_edge', 'push_to_edge'), - ('use_skull', 'use_skull'), - ('perc_int', 'perc_int'), - ('max_inter_iter', 'max_inter_iter'), - ('blur_fwhm', 'blur_fwhm'), - ('fac', 'fac'), - ('monkey', 'monkey') - ]) - ]) - - anat_skullstrip = pe.Node(interface=afni.SkullStrip(), - name=f'anat_skullstrip_{pipe_num}') - anat_skullstrip.inputs.outputtype = 'NIFTI_GZ' - - if strat_pool.check_rpool('desc-preproc_T1w'): - node, out = strat_pool.get_data('desc-preproc_T1w') - wf.connect(node, out, anat_skullstrip, 'in_file') - - elif strat_pool.check_rpool('desc-preproc_T2w'): - node, out = strat_pool.get_data('desc-preproc_T2w') - wf.connect(node, out, anat_skullstrip, 'in_file') - - wf.connect(skullstrip_args, 'expr', anat_skullstrip, 'args') + mask_vol=cfg.anatomical_preproc["brain_extraction"]["AFNI-3dSkullStrip"][ + "mask_vol" + ], + shrink_factor=cfg.anatomical_preproc["brain_extraction"]["AFNI-3dSkullStrip"][ + "shrink_factor" + ], + var_shrink_fac=cfg.anatomical_preproc["brain_extraction"]["AFNI-3dSkullStrip"][ + "var_shrink_fac" + ], + shrink_fac_bot_lim=cfg.anatomical_preproc["brain_extraction"][ + "AFNI-3dSkullStrip" + ]["shrink_factor_bot_lim"], + avoid_vent=cfg.anatomical_preproc["brain_extraction"]["AFNI-3dSkullStrip"][ + "avoid_vent" + ], + 
niter=cfg.anatomical_preproc["brain_extraction"]["AFNI-3dSkullStrip"][ + "n_iterations" + ], + pushout=cfg.anatomical_preproc["brain_extraction"]["AFNI-3dSkullStrip"][ + "pushout" + ], + touchup=cfg.anatomical_preproc["brain_extraction"]["AFNI-3dSkullStrip"][ + "touchup" + ], + fill_hole=cfg.anatomical_preproc["brain_extraction"]["AFNI-3dSkullStrip"][ + "fill_hole" + ], + NN_smooth=cfg.anatomical_preproc["brain_extraction"]["AFNI-3dSkullStrip"][ + "NN_smooth" + ], + smooth_final=cfg.anatomical_preproc["brain_extraction"]["AFNI-3dSkullStrip"][ + "smooth_final" + ], + avoid_eyes=cfg.anatomical_preproc["brain_extraction"]["AFNI-3dSkullStrip"][ + "avoid_eyes" + ], + use_edge=cfg.anatomical_preproc["brain_extraction"]["AFNI-3dSkullStrip"][ + "use_edge" + ], + exp_frac=cfg.anatomical_preproc["brain_extraction"]["AFNI-3dSkullStrip"][ + "exp_frac" + ], + push_to_edge=cfg.anatomical_preproc["brain_extraction"]["AFNI-3dSkullStrip"][ + "push_to_edge" + ], + use_skull=cfg.anatomical_preproc["brain_extraction"]["AFNI-3dSkullStrip"][ + "use_skull" + ], + perc_int=cfg.anatomical_preproc["brain_extraction"]["AFNI-3dSkullStrip"][ + "perc_int" + ], + max_inter_iter=cfg.anatomical_preproc["brain_extraction"]["AFNI-3dSkullStrip"][ + "max_inter_iter" + ], + fac=cfg.anatomical_preproc["brain_extraction"]["AFNI-3dSkullStrip"]["fac"], + blur_fwhm=cfg.anatomical_preproc["brain_extraction"]["AFNI-3dSkullStrip"][ + "blur_fwhm" + ], + monkey=cfg.anatomical_preproc["brain_extraction"]["AFNI-3dSkullStrip"][ + "monkey" + ], + ) + + wf.connect( + [ + ( + inputnode_afni, + skullstrip_args, + [ + ("mask_vol", "mask_vol"), + ("shrink_factor", "shrink_fac"), + ("var_shrink_fac", "var_shrink_fac"), + ("shrink_fac_bot_lim", "shrink_fac_bot_lim"), + ("avoid_vent", "avoid_vent"), + ("niter", "niter"), + ("pushout", "pushout"), + ("touchup", "touchup"), + ("fill_hole", "fill_hole"), + ("avoid_eyes", "avoid_eyes"), + ("use_edge", "use_edge"), + ("exp_frac", "exp_frac"), + ("NN_smooth", "NN_smooth"), + ("smooth_final", "smooth_final"), + ("push_to_edge", "push_to_edge"), + ("use_skull", "use_skull"), + ("perc_int", "perc_int"), + ("max_inter_iter", "max_inter_iter"), + ("blur_fwhm", "blur_fwhm"), + ("fac", "fac"), + ("monkey", "monkey"), + ], + ) + ] + ) + + anat_skullstrip = pe.Node( + interface=afni.SkullStrip(), name=f"anat_skullstrip_{pipe_num}" + ) + anat_skullstrip.inputs.outputtype = "NIFTI_GZ" + + if strat_pool.check_rpool("desc-preproc_T1w"): + node, out = strat_pool.get_data("desc-preproc_T1w") + wf.connect(node, out, anat_skullstrip, "in_file") + + elif strat_pool.check_rpool("desc-preproc_T2w"): + node, out = strat_pool.get_data("desc-preproc_T2w") + wf.connect(node, out, anat_skullstrip, "in_file") + + wf.connect(skullstrip_args, "expr", anat_skullstrip, "args") # Generate anatomical brain mask - anat_brain_mask = pe.Node(interface=afni.Calc(), - name=f'anat_brain_mask_{pipe_num}') + anat_brain_mask = pe.Node(interface=afni.Calc(), name=f"anat_brain_mask_{pipe_num}") - anat_brain_mask.inputs.expr = 'step(a)' - anat_brain_mask.inputs.outputtype = 'NIFTI_GZ' + anat_brain_mask.inputs.expr = "step(a)" + anat_brain_mask.inputs.outputtype = "NIFTI_GZ" - wf.connect(anat_skullstrip, 'out_file', - anat_brain_mask, 'in_file_a') + wf.connect(anat_skullstrip, "out_file", anat_brain_mask, "in_file_a") - if strat_pool.check_rpool('desc-preproc_T1w'): - outputs = { - 'space-T1w_desc-brain_mask': (anat_brain_mask, 'out_file') - } + if strat_pool.check_rpool("desc-preproc_T1w"): + outputs = {"space-T1w_desc-brain_mask": 
(anat_brain_mask, "out_file")} - elif strat_pool.check_rpool('desc-preproc_T2w'): - outputs = { - 'space-T2w_desc-brain_mask': (anat_brain_mask, 'out_file') - } + elif strat_pool.check_rpool("desc-preproc_T2w"): + outputs = {"space-T2w_desc-brain_mask": (anat_brain_mask, "out_file")} return (wf, outputs) def fsl_brain_connector(wf, cfg, strat_pool, pipe_num, opt): inputnode_bet = pe.Node( - util.IdentityInterface(fields=['frac', - 'mask_boolean', - 'mesh_boolean', - 'outline', - 'padding', - 'radius', - 'reduce_bias', - 'remove_eyes', - 'robust', - 'skull', - 'surfaces', - 'threshold', - 'vertical_gradient']), - name=f'BET_options_{pipe_num}') - - + util.IdentityInterface( + fields=[ + "frac", + "mask_boolean", + "mesh_boolean", + "outline", + "padding", + "radius", + "reduce_bias", + "remove_eyes", + "robust", + "skull", + "surfaces", + "threshold", + "vertical_gradient", + ] + ), + name=f"BET_options_{pipe_num}", + ) + anat_skullstrip = pe.Node( - interface=fsl.BET(), name=f'anat_BET_skullstrip_{pipe_num}') - anat_skullstrip.inputs.output_type = 'NIFTI_GZ' + interface=fsl.BET(), name=f"anat_BET_skullstrip_{pipe_num}" + ) + anat_skullstrip.inputs.output_type = "NIFTI_GZ" inputnode_bet.inputs.set( - frac=cfg.anatomical_preproc['brain_extraction'][ - 'FSL-BET']['frac'], - mask_boolean= True, - mesh_boolean= - cfg.anatomical_preproc['brain_extraction'][ - 'FSL-BET']['mesh_boolean'], - outline=cfg.anatomical_preproc['brain_extraction'][ - 'FSL-BET']['outline'], - padding=cfg.anatomical_preproc['brain_extraction'][ - 'FSL-BET']['padding'], - radius=cfg.anatomical_preproc['brain_extraction'][ - 'FSL-BET']['radius'], - reduce_bias= - cfg.anatomical_preproc['brain_extraction'][ - 'FSL-BET']['reduce_bias'], - remove_eyes= - cfg.anatomical_preproc['brain_extraction'][ - 'FSL-BET']['remove_eyes'], - robust=cfg.anatomical_preproc['brain_extraction'][ - 'FSL-BET']['robust'], - skull=cfg.anatomical_preproc['brain_extraction'][ - 'FSL-BET']['skull'], - surfaces=cfg.anatomical_preproc['brain_extraction'][ - 'FSL-BET']['surfaces'], - threshold=cfg.anatomical_preproc['brain_extraction'][ - 'FSL-BET']['threshold'], - vertical_gradient= - cfg.anatomical_preproc['brain_extraction'][ - 'FSL-BET']['vertical_gradient'], - ) - + frac=cfg.anatomical_preproc["brain_extraction"]["FSL-BET"]["frac"], + mask_boolean=True, + mesh_boolean=cfg.anatomical_preproc["brain_extraction"]["FSL-BET"][ + "mesh_boolean" + ], + outline=cfg.anatomical_preproc["brain_extraction"]["FSL-BET"]["outline"], + padding=cfg.anatomical_preproc["brain_extraction"]["FSL-BET"]["padding"], + radius=cfg.anatomical_preproc["brain_extraction"]["FSL-BET"]["radius"], + reduce_bias=cfg.anatomical_preproc["brain_extraction"]["FSL-BET"][ + "reduce_bias" + ], + remove_eyes=cfg.anatomical_preproc["brain_extraction"]["FSL-BET"][ + "remove_eyes" + ], + robust=cfg.anatomical_preproc["brain_extraction"]["FSL-BET"]["robust"], + skull=cfg.anatomical_preproc["brain_extraction"]["FSL-BET"]["skull"], + surfaces=cfg.anatomical_preproc["brain_extraction"]["FSL-BET"]["surfaces"], + threshold=cfg.anatomical_preproc["brain_extraction"]["FSL-BET"]["threshold"], + vertical_gradient=cfg.anatomical_preproc["brain_extraction"]["FSL-BET"][ + "vertical_gradient" + ], + ) + anat_robustfov = pe.Node( - interface=fsl.RobustFOV(), name=f'anat_RobustFOV_{pipe_num}') - - anat_robustfov.inputs.output_type = 'NIFTI_GZ' - - anat_pad_RobustFOV_cropped = pe.Node(util.Function(input_names=['cropped_image_path', 'target_image_path'], - output_names=['padded_image_path'], - function=pad), - 
name=f'anat_pad_mask_{pipe_num}' - ) - - if strat_pool.check_rpool('desc-preproc_T1w'): - node, out = strat_pool.get_data('desc-preproc_T1w') - if cfg.anatomical_preproc['brain_extraction']['FSL-BET']['Robustfov']: - wf.connect(node, out, anat_robustfov, 'in_file') - wf.connect(node, out, anat_pad_RobustFOV_cropped, 'target_image_path') - wf.connect(anat_robustfov, 'out_roi', anat_pad_RobustFOV_cropped, 'cropped_image_path') - wf.connect(anat_pad_RobustFOV_cropped, 'padded_image_path', anat_skullstrip,'in_file') - else : - wf.connect(node, out, anat_skullstrip, 'in_file') - - elif strat_pool.check_rpool('desc-preproc_T2w'): - node, out = strat_pool.get_data('desc-preproc_T2w') - if cfg.anatomical_preproc['brain_extraction']['FSL-BET']['Robustfov']: - wf.connect(node, out, anat_robustfov, 'in_file') - wf.connect(node, out, anat_pad_RobustFOV_cropped, 'target_image_path') - wf.connect(anat_robustfov, 'out_roi', anat_pad_RobustFOV_cropped, 'cropped_image_path') - wf.connect(anat_pad_RobustFOV_cropped, 'padded_image_path', anat_skullstrip,'in_file') - else : - wf.connect(node, out, anat_skullstrip, 'in_file') - - wf.connect([ - (inputnode_bet, anat_skullstrip, [ - ('frac', 'frac'), - ('mask_boolean', 'mask'), - ('mesh_boolean', 'mesh'), - ('outline', 'outline'), - ('padding', 'padding'), - ('radius', 'radius'), - ('reduce_bias', 'reduce_bias'), - ('remove_eyes', 'remove_eyes'), - ('robust', 'robust'), - ('skull', 'skull'), - ('surfaces', 'surfaces'), - ('threshold', 'threshold'), - ('vertical_gradient', 'vertical_gradient'), - ]) - ]) - - if strat_pool.check_rpool('desc-preproc_T1w'): - outputs = { - 'space-T1w_desc-brain_mask': (anat_skullstrip, 'mask_file') - } + interface=fsl.RobustFOV(), name=f"anat_RobustFOV_{pipe_num}" + ) - elif strat_pool.check_rpool('desc-preproc_T2w'): - outputs = { - 'space-T2w_desc-brain_mask': (anat_skullstrip, 'mask_file') - } + anat_robustfov.inputs.output_type = "NIFTI_GZ" + + anat_pad_RobustFOV_cropped = pe.Node( + util.Function( + input_names=["cropped_image_path", "target_image_path"], + output_names=["padded_image_path"], + function=pad, + ), + name=f"anat_pad_mask_{pipe_num}", + ) + + if strat_pool.check_rpool("desc-preproc_T1w"): + node, out = strat_pool.get_data("desc-preproc_T1w") + if cfg.anatomical_preproc["brain_extraction"]["FSL-BET"]["Robustfov"]: + wf.connect(node, out, anat_robustfov, "in_file") + wf.connect(node, out, anat_pad_RobustFOV_cropped, "target_image_path") + wf.connect( + anat_robustfov, + "out_roi", + anat_pad_RobustFOV_cropped, + "cropped_image_path", + ) + wf.connect( + anat_pad_RobustFOV_cropped, + "padded_image_path", + anat_skullstrip, + "in_file", + ) + else: + wf.connect(node, out, anat_skullstrip, "in_file") + + elif strat_pool.check_rpool("desc-preproc_T2w"): + node, out = strat_pool.get_data("desc-preproc_T2w") + if cfg.anatomical_preproc["brain_extraction"]["FSL-BET"]["Robustfov"]: + wf.connect(node, out, anat_robustfov, "in_file") + wf.connect(node, out, anat_pad_RobustFOV_cropped, "target_image_path") + wf.connect( + anat_robustfov, + "out_roi", + anat_pad_RobustFOV_cropped, + "cropped_image_path", + ) + wf.connect( + anat_pad_RobustFOV_cropped, + "padded_image_path", + anat_skullstrip, + "in_file", + ) + else: + wf.connect(node, out, anat_skullstrip, "in_file") + + wf.connect( + [ + ( + inputnode_bet, + anat_skullstrip, + [ + ("frac", "frac"), + ("mask_boolean", "mask"), + ("mesh_boolean", "mesh"), + ("outline", "outline"), + ("padding", "padding"), + ("radius", "radius"), + ("reduce_bias", "reduce_bias"), + 
("remove_eyes", "remove_eyes"), + ("robust", "robust"), + ("skull", "skull"), + ("surfaces", "surfaces"), + ("threshold", "threshold"), + ("vertical_gradient", "vertical_gradient"), + ], + ) + ] + ) + + if strat_pool.check_rpool("desc-preproc_T1w"): + outputs = {"space-T1w_desc-brain_mask": (anat_skullstrip, "mask_file")} + + elif strat_pool.check_rpool("desc-preproc_T2w"): + outputs = {"space-T2w_desc-brain_mask": (anat_skullstrip, "mask_file")} return (wf, outputs) @@ -711,37 +846,44 @@ def fsl_brain_connector(wf, cfg, strat_pool, pipe_num, opt): def niworkflows_ants_brain_connector(wf, cfg, strat_pool, pipe_num, opt): # Skull-stripping using niworkflows-ants anat_skullstrip_ants = init_brain_extraction_wf( - tpl_target_path=cfg.anatomical_preproc['brain_extraction'][ - 'niworkflows-ants'][ - 'template_path'], - tpl_mask_path=cfg.anatomical_preproc['brain_extraction'][ - 'niworkflows-ants'][ - 'mask_path'], - tpl_regmask_path=cfg.anatomical_preproc['brain_extraction'][ - 'niworkflows-ants'][ - 'regmask_path'], - name='anat_skullstrip_ants', - atropos_use_random_seed=cfg.pipeline_setup['system_config'][ - 'random_seed'] is None) - - if strat_pool.check_rpool('desc-preproc_T1w'): - node, out = strat_pool.get_data('desc-preproc_T1w') - wf.connect(node, out, anat_skullstrip_ants, 'inputnode.in_files') - - elif strat_pool.check_rpool('desc-preproc_T2w'): - node, out = strat_pool.get_data('desc-preproc_T2w') - wf.connect(node, out, anat_skullstrip_ants, 'inputnode.in_files') - - if strat_pool.check_rpool('desc-preproc_T1w'): + tpl_target_path=cfg.anatomical_preproc["brain_extraction"]["niworkflows-ants"][ + "template_path" + ], + tpl_mask_path=cfg.anatomical_preproc["brain_extraction"]["niworkflows-ants"][ + "mask_path" + ], + tpl_regmask_path=cfg.anatomical_preproc["brain_extraction"]["niworkflows-ants"][ + "regmask_path" + ], + name="anat_skullstrip_ants", + atropos_use_random_seed=cfg.pipeline_setup["system_config"]["random_seed"] + is None, + ) + + if strat_pool.check_rpool("desc-preproc_T1w"): + node, out = strat_pool.get_data("desc-preproc_T1w") + wf.connect(node, out, anat_skullstrip_ants, "inputnode.in_files") + + elif strat_pool.check_rpool("desc-preproc_T2w"): + node, out = strat_pool.get_data("desc-preproc_T2w") + wf.connect(node, out, anat_skullstrip_ants, "inputnode.in_files") + + if strat_pool.check_rpool("desc-preproc_T1w"): outputs = { - 'space-T1w_desc-brain_mask': (anat_skullstrip_ants, 'atropos_wf.copy_xform.out_mask'), - 'desc-preproc_T1w': (anat_skullstrip_ants, 'copy_xform.out_file') + "space-T1w_desc-brain_mask": ( + anat_skullstrip_ants, + "atropos_wf.copy_xform.out_mask", + ), + "desc-preproc_T1w": (anat_skullstrip_ants, "copy_xform.out_file"), } - elif strat_pool.check_rpool('desc-preproc_T2w'): + elif strat_pool.check_rpool("desc-preproc_T2w"): outputs = { - 'space-T2w_desc-brain_mask': (anat_skullstrip_ants, 'atropos_wf.copy_xform.out_mask'), - 'desc-preproc_T2w': (anat_skullstrip_ants, 'copy_xform.out_file') + "space-T2w_desc-brain_mask": ( + anat_skullstrip_ants, + "atropos_wf.copy_xform.out_mask", + ), + "desc-preproc_T2w": (anat_skullstrip_ants, "copy_xform.out_file"), } return (wf, outputs) @@ -757,142 +899,169 @@ def unet_brain_connector(wf, cfg, strat_pool, pipe_num, opt): rescale_dim: 256 """ from CPAC.unet.function import predict_volumes - unet_mask = pe.Node(util.Function(input_names=['model_path', 'cimg_in'], - output_names=['out_path'], - function=predict_volumes), - name=f'unet_mask_{pipe_num}') - node, out = strat_pool.get_data('unet-model') - 
wf.connect(node, out, unet_mask, 'model_path') + unet_mask = pe.Node( + util.Function( + input_names=["model_path", "cimg_in"], + output_names=["out_path"], + function=predict_volumes, + ), + name=f"unet_mask_{pipe_num}", + ) + + node, out = strat_pool.get_data("unet-model") + wf.connect(node, out, unet_mask, "model_path") - if strat_pool.check_rpool('desc-preproc_T1w'): - node, out = strat_pool.get_data('desc-preproc_T1w') - wf.connect(node, out, unet_mask, 'cimg_in') + if strat_pool.check_rpool("desc-preproc_T1w"): + node, out = strat_pool.get_data("desc-preproc_T1w") + wf.connect(node, out, unet_mask, "cimg_in") - elif strat_pool.check_rpool('desc-preproc_T2w'): - node, out = strat_pool.get_data('desc-preproc_T2w') - wf.connect(node, out, unet_mask, 'cimg_in') + elif strat_pool.check_rpool("desc-preproc_T2w"): + node, out = strat_pool.get_data("desc-preproc_T2w") + wf.connect(node, out, unet_mask, "cimg_in") """ Revised mask with ANTs """ # fslmaths -mul brain.nii.gz - unet_masked_brain = pe.Node(interface=fsl.MultiImageMaths(), - name=f'unet_masked_brain_{pipe_num}') + unet_masked_brain = pe.Node( + interface=fsl.MultiImageMaths(), name=f"unet_masked_brain_{pipe_num}" + ) unet_masked_brain.inputs.op_string = "-mul %s" - if strat_pool.check_rpool('desc-preproc_T1w'): - node, out = strat_pool.get_data('desc-preproc_T1w') - wf.connect(node, out, unet_masked_brain, 'in_file') - - elif strat_pool.check_rpool('desc-preproc_T2w'): - node, out = strat_pool.get_data('desc-preproc_T2w') - wf.connect(node, out, unet_masked_brain, 'in_file') + if strat_pool.check_rpool("desc-preproc_T1w"): + node, out = strat_pool.get_data("desc-preproc_T1w") + wf.connect(node, out, unet_masked_brain, "in_file") + + elif strat_pool.check_rpool("desc-preproc_T2w"): + node, out = strat_pool.get_data("desc-preproc_T2w") + wf.connect(node, out, unet_masked_brain, "in_file") - wf.connect(unet_mask, 'out_path', unet_masked_brain, 'operand_files') + wf.connect(unet_mask, "out_path", unet_masked_brain, "operand_files") # flirt -v -dof 6 -in brain.nii.gz -ref NMT_SS_0.5mm.nii.gz -o brain_rot2atl -omat brain_rot2atl.mat -interp sinc - native_brain_to_template_brain = pe.Node(interface=fsl.FLIRT(), - name=f'native_brain_to_template_' - f'brain_{pipe_num}') + native_brain_to_template_brain = pe.Node( + interface=fsl.FLIRT(), name=f"native_brain_to_template_" f"brain_{pipe_num}" + ) native_brain_to_template_brain.inputs.dof = 6 - native_brain_to_template_brain.inputs.interp = 'sinc' - wf.connect(unet_masked_brain, 'out_file', - native_brain_to_template_brain, 'in_file') + native_brain_to_template_brain.inputs.interp = "sinc" + wf.connect(unet_masked_brain, "out_file", native_brain_to_template_brain, "in_file") - node, out = strat_pool.get_data('T1w-brain-template') - wf.connect(node, out, native_brain_to_template_brain, 'reference') + node, out = strat_pool.get_data("T1w-brain-template") + wf.connect(node, out, native_brain_to_template_brain, "reference") # flirt -in head.nii.gz -ref NMT_0.5mm.nii.gz -o head_rot2atl -applyxfm -init brain_rot2atl.mat - native_head_to_template_head = pe.Node(interface=fsl.FLIRT(), - name=f'native_head_to_template_' - f'head_{pipe_num}') + native_head_to_template_head = pe.Node( + interface=fsl.FLIRT(), name=f"native_head_to_template_" f"head_{pipe_num}" + ) native_head_to_template_head.inputs.apply_xfm = True - if strat_pool.check_rpool('desc-preproc_T1w'): - node, out = strat_pool.get_data('desc-preproc_T1w') - wf.connect(node, out, native_head_to_template_head, 'in_file') - - elif 
strat_pool.check_rpool('desc-preproc_T2w'): - node, out = strat_pool.get_data('desc-preproc_T2w') - wf.connect(node, out, native_head_to_template_head, 'in_file') + if strat_pool.check_rpool("desc-preproc_T1w"): + node, out = strat_pool.get_data("desc-preproc_T1w") + wf.connect(node, out, native_head_to_template_head, "in_file") + + elif strat_pool.check_rpool("desc-preproc_T2w"): + node, out = strat_pool.get_data("desc-preproc_T2w") + wf.connect(node, out, native_head_to_template_head, "in_file") - wf.connect(native_brain_to_template_brain, 'out_matrix_file', - native_head_to_template_head, 'in_matrix_file') + wf.connect( + native_brain_to_template_brain, + "out_matrix_file", + native_head_to_template_head, + "in_matrix_file", + ) - node, out = strat_pool.get_data('T1w-template') - wf.connect(node, out, native_head_to_template_head, 'reference') + node, out = strat_pool.get_data("T1w-template") + wf.connect(node, out, native_head_to_template_head, "reference") # fslmaths NMT_SS_0.5mm.nii.gz -bin templateMask.nii.gz - template_brain_mask = pe.Node(interface=fsl.maths.MathsCommand(), - name=f'template_brain_mask_{pipe_num}') - template_brain_mask.inputs.args = '-bin' + template_brain_mask = pe.Node( + interface=fsl.maths.MathsCommand(), name=f"template_brain_mask_{pipe_num}" + ) + template_brain_mask.inputs.args = "-bin" - node, out = strat_pool.get_data('T1w-brain-template') - wf.connect(node, out, template_brain_mask, 'in_file') + node, out = strat_pool.get_data("T1w-brain-template") + wf.connect(node, out, template_brain_mask, "in_file") # ANTS 3 -m CC[head_rot2atl.nii.gz,NMT_0.5mm.nii.gz,1,5] -t SyN[0.25] -r Gauss[3,0] -o atl2T1rot -i 60x50x20 --use-Histogram-Matching --number-of-affine-iterations 10000x10000x10000x10000x10000 --MI-option 32x16000 - ants_template_head_to_template = pe.Node(interface=ants.Registration(), - name=f'template_head_to_' - f'template_{pipe_num}') - ants_template_head_to_template.inputs.metric = ['CC'] + ants_template_head_to_template = pe.Node( + interface=ants.Registration(), name=f"template_head_to_" f"template_{pipe_num}" + ) + ants_template_head_to_template.inputs.metric = ["CC"] ants_template_head_to_template.inputs.metric_weight = [1, 5] - ants_template_head_to_template.inputs.transforms = ['SyN'] + ants_template_head_to_template.inputs.transforms = ["SyN"] ants_template_head_to_template.inputs.transform_parameters = [(0.25,)] - ants_template_head_to_template.inputs.interpolation = 'NearestNeighbor' - ants_template_head_to_template.inputs.number_of_iterations = [ - [60, 50, 20]] + ants_template_head_to_template.inputs.interpolation = "NearestNeighbor" + ants_template_head_to_template.inputs.number_of_iterations = [[60, 50, 20]] ants_template_head_to_template.inputs.smoothing_sigmas = [[0.6, 0.2, 0.0]] ants_template_head_to_template.inputs.shrink_factors = [[4, 2, 1]] - ants_template_head_to_template.inputs.convergence_threshold = [1.e-8] - wf.connect(native_head_to_template_head, 'out_file', - ants_template_head_to_template, 'fixed_image') + ants_template_head_to_template.inputs.convergence_threshold = [1.0e-8] + wf.connect( + native_head_to_template_head, + "out_file", + ants_template_head_to_template, + "fixed_image", + ) - node, out = strat_pool.get_data('T1w-brain-template') - wf.connect(node, out, ants_template_head_to_template, 'moving_image') + node, out = strat_pool.get_data("T1w-brain-template") + wf.connect(node, out, ants_template_head_to_template, "moving_image") # antsApplyTransforms -d 3 -i templateMask.nii.gz -t atl2T1rotWarp.nii.gz 
atl2T1rotAffine.txt -r brain_rot2atl.nii.gz -o brain_rot2atl_mask.nii.gz template_head_transform_to_template = pe.Node( interface=ants.ApplyTransforms(), - name=f'template_head_transform_to_template_{pipe_num}') + name=f"template_head_transform_to_template_{pipe_num}", + ) template_head_transform_to_template.inputs.dimension = 3 - wf.connect(template_brain_mask, 'out_file', - template_head_transform_to_template, 'input_image') - wf.connect(native_brain_to_template_brain, 'out_file', - template_head_transform_to_template, 'reference_image') - wf.connect(ants_template_head_to_template, 'forward_transforms', - template_head_transform_to_template, 'transforms') + wf.connect( + template_brain_mask, + "out_file", + template_head_transform_to_template, + "input_image", + ) + wf.connect( + native_brain_to_template_brain, + "out_file", + template_head_transform_to_template, + "reference_image", + ) + wf.connect( + ants_template_head_to_template, + "forward_transforms", + template_head_transform_to_template, + "transforms", + ) - # convert_xfm -omat brain_rot2native.mat -inverse brain_rot2atl.mat  - invt = pe.Node(interface=fsl.ConvertXFM(), name='convert_xfm') + # convert_xfm -omat brain_rot2native.mat -inverse brain_rot2atl.mat + invt = pe.Node(interface=fsl.ConvertXFM(), name="convert_xfm") invt.inputs.invert_xfm = True - wf.connect(native_brain_to_template_brain, 'out_matrix_file', invt, - 'in_file') + wf.connect(native_brain_to_template_brain, "out_matrix_file", invt, "in_file") # flirt -in brain_rot2atl_mask.nii.gz -ref brain.nii.gz -o brain_mask.nii.gz -applyxfm -init brain_rot2native.mat - template_brain_to_native_brain = pe.Node(interface=fsl.FLIRT(), - name=f'template_brain_to_native_' - f'brain_{pipe_num}') + template_brain_to_native_brain = pe.Node( + interface=fsl.FLIRT(), name=f"template_brain_to_native_" f"brain_{pipe_num}" + ) template_brain_to_native_brain.inputs.apply_xfm = True - wf.connect(template_head_transform_to_template, 'output_image', - template_brain_to_native_brain, 'in_file') - wf.connect(unet_masked_brain, 'out_file', template_brain_to_native_brain, - 'reference') - wf.connect(invt, 'out_file', template_brain_to_native_brain, - 'in_matrix_file') + wf.connect( + template_head_transform_to_template, + "output_image", + template_brain_to_native_brain, + "in_file", + ) + wf.connect( + unet_masked_brain, "out_file", template_brain_to_native_brain, "reference" + ) + wf.connect(invt, "out_file", template_brain_to_native_brain, "in_matrix_file") # fslmaths brain_mask.nii.gz -thr .5 -bin brain_mask_thr.nii.gz - refined_mask = pe.Node(interface=fsl.Threshold(), name=f'refined_mask' - f'_{pipe_num}') + refined_mask = pe.Node( + interface=fsl.Threshold(), name=f"refined_mask" f"_{pipe_num}" + ) refined_mask.inputs.thresh = 0.5 - refined_mask.inputs.args = '-bin' - wf.connect(template_brain_to_native_brain, 'out_file', refined_mask, - 'in_file') + refined_mask.inputs.args = "-bin" + wf.connect(template_brain_to_native_brain, "out_file", refined_mask, "in_file") - outputs = { - 'space-T1w_desc-brain_mask': (refined_mask, 'out_file') - } + outputs = {"space-T1w_desc-brain_mask": (refined_mask, "out_file")} return (wf, outputs) @@ -900,365 +1069,374 @@ def unet_brain_connector(wf, cfg, strat_pool, pipe_num, opt): def freesurfer_brain_connector(wf, cfg, strat_pool, pipe_num, opt): # register FS brain mask to native space fs_brain_mask_to_native = pe.Node( - interface=freesurfer.ApplyVolTransform(), - name='fs_brain_mask_to_native') + interface=freesurfer.ApplyVolTransform(), 
name="fs_brain_mask_to_native" + ) fs_brain_mask_to_native.inputs.reg_header = True - node, out = strat_pool.get_data('pipeline-fs_brainmask') - wf.connect(node, out, fs_brain_mask_to_native, 'source_file') + node, out = strat_pool.get_data("pipeline-fs_brainmask") + wf.connect(node, out, fs_brain_mask_to_native, "source_file") - node, out = strat_pool.get_data('pipeline-fs_raw-average') - wf.connect(node, out, fs_brain_mask_to_native, 'target_file') + node, out = strat_pool.get_data("pipeline-fs_raw-average") + wf.connect(node, out, fs_brain_mask_to_native, "target_file") - node, out = strat_pool.get_data('freesurfer-subject-dir') - wf.connect(node, out, fs_brain_mask_to_native, 'subjects_dir') + node, out = strat_pool.get_data("freesurfer-subject-dir") + wf.connect(node, out, fs_brain_mask_to_native, "subjects_dir") # convert brain mask file from .mgz to .nii.gz - fs_brain_mask_to_nifti = pe.Node(util.Function(input_names=['in_file'], - output_names=['out_file'], - function=mri_convert), - name=f'fs_brainmask_to_nifti_{pipe_num}') - wf.connect(fs_brain_mask_to_native, 'transformed_file', - fs_brain_mask_to_nifti, 'in_file') + fs_brain_mask_to_nifti = pe.Node( + util.Function( + input_names=["in_file"], output_names=["out_file"], function=mri_convert + ), + name=f"fs_brainmask_to_nifti_{pipe_num}", + ) + wf.connect( + fs_brain_mask_to_native, "transformed_file", fs_brain_mask_to_nifti, "in_file" + ) # binarize the brain mask - binarize_fs_brain_mask = pe.Node(interface=fsl.maths.MathsCommand(), - name=f'binarize_fs_brainmask_{pipe_num}') - binarize_fs_brain_mask.inputs.args = '-bin' - wf.connect(fs_brain_mask_to_nifti, 'out_file', - binarize_fs_brain_mask, 'in_file') + binarize_fs_brain_mask = pe.Node( + interface=fsl.maths.MathsCommand(), name=f"binarize_fs_brainmask_{pipe_num}" + ) + binarize_fs_brain_mask.inputs.args = "-bin" + wf.connect(fs_brain_mask_to_nifti, "out_file", binarize_fs_brain_mask, "in_file") # fill holes - fill_fs_brain_mask = pe.Node(interface=afni.MaskTool(), - name=f'fill_fs_brainmask_{pipe_num}') + fill_fs_brain_mask = pe.Node( + interface=afni.MaskTool(), name=f"fill_fs_brainmask_{pipe_num}" + ) fill_fs_brain_mask.inputs.fill_holes = True - fill_fs_brain_mask.inputs.outputtype = 'NIFTI_GZ' - wf.connect(binarize_fs_brain_mask, 'out_file', - fill_fs_brain_mask, 'in_file') + fill_fs_brain_mask.inputs.outputtype = "NIFTI_GZ" + wf.connect(binarize_fs_brain_mask, "out_file", fill_fs_brain_mask, "in_file") - outputs = { - 'space-T1w_desc-brain_mask': (fill_fs_brain_mask, 'out_file') - } + outputs = {"space-T1w_desc-brain_mask": (fill_fs_brain_mask, "out_file")} return (wf, outputs) def freesurfer_abcd_brain_connector(wf, cfg, strat_pool, pipe_num, opt): - ''' + """ ABCD harmonization - anatomical brain mask generation Ref: https://github.com/DCAN-Labs/DCAN-HCP/blob/master/PostFreeSurfer/PostFreeSurferPipeline.sh#L151-L156 - ''' - - wmparc_to_nifti = pe.Node(util.Function(input_names=['in_file', - 'reslice_like', - 'args'], - output_names=['out_file'], - function=mri_convert), - name=f'wmparc_to_nifti_{pipe_num}') - + """ + wmparc_to_nifti = pe.Node( + util.Function( + input_names=["in_file", "reslice_like", "args"], + output_names=["out_file"], + function=mri_convert, + ), + name=f"wmparc_to_nifti_{pipe_num}", + ) + # Register wmparc file if ingressing FreeSurfer data - if strat_pool.check_rpool('pipeline-fs_xfm'): + if strat_pool.check_rpool("pipeline-fs_xfm"): + wmparc_to_native = pe.Node( + util.Function( + input_names=["source_file", "target_file", "xfm", 
"out_file"], + output_names=["transformed_file"], + function=normalize_wmparc, + ), + name=f"wmparc_to_native_{pipe_num}", + ) - wmparc_to_native = pe.Node(util.Function(input_names=['source_file', - 'target_file', - 'xfm', - 'out_file'], - output_names=['transformed_file'], - function=normalize_wmparc), - name=f'wmparc_to_native_{pipe_num}') - - wmparc_to_native.inputs.out_file = 'wmparc_warped.mgz' + wmparc_to_native.inputs.out_file = "wmparc_warped.mgz" - node, out = strat_pool.get_data('pipeline-fs_wmparc') - wf.connect(node, out, wmparc_to_native, 'source_file') + node, out = strat_pool.get_data("pipeline-fs_wmparc") + wf.connect(node, out, wmparc_to_native, "source_file") - node, out = strat_pool.get_data('pipeline-fs_raw-average') - wf.connect(node, out, wmparc_to_native, 'target_file') + node, out = strat_pool.get_data("pipeline-fs_raw-average") + wf.connect(node, out, wmparc_to_native, "target_file") - node, out = strat_pool.get_data('pipeline-fs_xfm') - wf.connect(node, out, wmparc_to_native, 'xfm') + node, out = strat_pool.get_data("pipeline-fs_xfm") + wf.connect(node, out, wmparc_to_native, "xfm") + + wf.connect(wmparc_to_native, "transformed_file", wmparc_to_nifti, "in_file") - wf.connect(wmparc_to_native, 'transformed_file', wmparc_to_nifti, 'in_file') - else: - - node, out = strat_pool.get_data('pipeline-fs_wmparc') - wf.connect(node, out, wmparc_to_nifti, 'in_file') + node, out = strat_pool.get_data("pipeline-fs_wmparc") + wf.connect(node, out, wmparc_to_nifti, "in_file") - wmparc_to_nifti.inputs.args = '-rt nearest' + wmparc_to_nifti.inputs.args = "-rt nearest" - node, out = strat_pool.get_data('desc-preproc_T1w') - wf.connect(node, out, wmparc_to_nifti, 'reslice_like') + node, out = strat_pool.get_data("desc-preproc_T1w") + wf.connect(node, out, wmparc_to_nifti, "reslice_like") - binary_mask = pe.Node(interface=fsl.maths.MathsCommand(), - name=f'binarize_wmparc_{pipe_num}') - binary_mask.inputs.args = '-bin -dilD -dilD -dilD -ero -ero' + binary_mask = pe.Node( + interface=fsl.maths.MathsCommand(), name=f"binarize_wmparc_{pipe_num}" + ) + binary_mask.inputs.args = "-bin -dilD -dilD -dilD -ero -ero" - wf.connect(wmparc_to_nifti, 'out_file', binary_mask, 'in_file') + wf.connect(wmparc_to_nifti, "out_file", binary_mask, "in_file") - wb_command_fill_holes = pe.Node(util.Function(input_names=['in_file'], - output_names=['out_file'], - function=wb_command), - name=f'wb_command_fill_holes_{pipe_num}') + wb_command_fill_holes = pe.Node( + util.Function( + input_names=["in_file"], output_names=["out_file"], function=wb_command + ), + name=f"wb_command_fill_holes_{pipe_num}", + ) - wf.connect(binary_mask, 'out_file', wb_command_fill_holes, 'in_file') + wf.connect(binary_mask, "out_file", wb_command_fill_holes, "in_file") - binary_filled_mask = pe.Node(interface=fsl.maths.MathsCommand(), - name=f'binarize_filled_wmparc_{pipe_num}') - binary_filled_mask.inputs.args = '-bin' + binary_filled_mask = pe.Node( + interface=fsl.maths.MathsCommand(), name=f"binarize_filled_wmparc_{pipe_num}" + ) + binary_filled_mask.inputs.args = "-bin" - wf.connect(wb_command_fill_holes, 'out_file', - binary_filled_mask, 'in_file') + wf.connect(wb_command_fill_holes, "out_file", binary_filled_mask, "in_file") - brain_mask_to_t1_restore = pe.Node(interface=fsl.ApplyWarp(), - name=f'brain_mask_to_t1_restore_{pipe_num}') - brain_mask_to_t1_restore.inputs.interp = 'nn' - brain_mask_to_t1_restore.inputs.premat = cfg.registration_workflows['anatomical_registration']['registration']['FSL-FNIRT']['identity_matrix'] 
+ brain_mask_to_t1_restore = pe.Node( + interface=fsl.ApplyWarp(), name=f"brain_mask_to_t1_restore_{pipe_num}" + ) + brain_mask_to_t1_restore.inputs.interp = "nn" + brain_mask_to_t1_restore.inputs.premat = cfg.registration_workflows[ + "anatomical_registration" + ]["registration"]["FSL-FNIRT"]["identity_matrix"] - wf.connect(binary_filled_mask, 'out_file', - brain_mask_to_t1_restore, 'in_file') + wf.connect(binary_filled_mask, "out_file", brain_mask_to_t1_restore, "in_file") - node, out = strat_pool.get_data('desc-preproc_T1w') - wf.connect(node, out, brain_mask_to_t1_restore, 'ref_file') + node, out = strat_pool.get_data("desc-preproc_T1w") + wf.connect(node, out, brain_mask_to_t1_restore, "ref_file") - outputs = { - 'space-T1w_desc-brain_mask': (brain_mask_to_t1_restore, 'out_file') - } + outputs = {"space-T1w_desc-brain_mask": (brain_mask_to_t1_restore, "out_file")} return (wf, outputs) def freesurfer_fsl_brain_connector(wf, cfg, strat_pool, pipe_num, opt): - - node_id = f'{opt.lower()}_{pipe_num}' + node_id = f"{opt.lower()}_{pipe_num}" # mri_convert -it mgz ${SUBJECTS_DIR}/${subject}/mri/brainmask.mgz -ot nii brainmask.nii.gz - convert_fs_brainmask_to_nifti = pe.Node(util.Function(input_names=['in_file'], - output_names=['out_file'], - function=mri_convert), - name=f'convert_fs_brainmask_to_nifti_{node_id}') + convert_fs_brainmask_to_nifti = pe.Node( + util.Function( + input_names=["in_file"], output_names=["out_file"], function=mri_convert + ), + name=f"convert_fs_brainmask_to_nifti_{node_id}", + ) - node, out = strat_pool.get_data('pipeline-fs_brainmask') - wf.connect(node, out, convert_fs_brainmask_to_nifti, 'in_file') + node, out = strat_pool.get_data("pipeline-fs_brainmask") + wf.connect(node, out, convert_fs_brainmask_to_nifti, "in_file") # mri_convert -it mgz ${SUBJECTS_DIR}/${subject}/mri/T1.mgz -ot nii T1.nii.gz - convert_fs_T1_to_nifti = pe.Node(util.Function(input_names=['in_file'], - output_names=['out_file'], - function=mri_convert), - name=f'convert_fs_T1_to_nifti_{node_id}') + convert_fs_T1_to_nifti = pe.Node( + util.Function( + input_names=["in_file"], output_names=["out_file"], function=mri_convert + ), + name=f"convert_fs_T1_to_nifti_{node_id}", + ) - node, out = strat_pool.get_data('pipeline-fs_T1') - wf.connect(node, out, convert_fs_T1_to_nifti, 'in_file') + node, out = strat_pool.get_data("pipeline-fs_T1") + wf.connect(node, out, convert_fs_T1_to_nifti, "in_file") # 3dresample -orient RPI -inset brainmask.nii.gz -prefix brain_fs.nii.gz - reorient_fs_brainmask = pe.Node(interface=afni.Resample(), - name=f'reorient_fs_brainmask_{node_id}', - mem_gb=0, - mem_x=(0.0115, 'in_file', 't')) - reorient_fs_brainmask.inputs.orientation = 'RPI' - reorient_fs_brainmask.inputs.outputtype = 'NIFTI_GZ' + reorient_fs_brainmask = pe.Node( + interface=afni.Resample(), + name=f"reorient_fs_brainmask_{node_id}", + mem_gb=0, + mem_x=(0.0115, "in_file", "t"), + ) + reorient_fs_brainmask.inputs.orientation = "RPI" + reorient_fs_brainmask.inputs.outputtype = "NIFTI_GZ" - wf.connect(convert_fs_brainmask_to_nifti, 'out_file', - reorient_fs_brainmask, 'in_file') + wf.connect( + convert_fs_brainmask_to_nifti, "out_file", reorient_fs_brainmask, "in_file" + ) # fslmaths brain_fs.nii.gz -abs -bin brain_fs_mask.nii.gz - binarize_fs_brain = pe.Node(interface=fsl.maths.MathsCommand(), - name=f'binarize_fs_brain_{node_id}') - binarize_fs_brain.inputs.args = '-abs -bin' + binarize_fs_brain = pe.Node( + interface=fsl.maths.MathsCommand(), name=f"binarize_fs_brain_{node_id}" + ) + 
binarize_fs_brain.inputs.args = "-abs -bin" - wf.connect(reorient_fs_brainmask, 'out_file', - binarize_fs_brain, 'in_file') + wf.connect(reorient_fs_brainmask, "out_file", binarize_fs_brain, "in_file") # 3dresample -orient RPI -inset T1.nii.gz -prefix head_fs.nii.gz - reorient_fs_T1 = pe.Node(interface=afni.Resample(), - name=f'reorient_fs_T1_{node_id}', - mem_gb=0, - mem_x=(0.0115, 'in_file', 't')) - reorient_fs_T1.inputs.orientation = 'RPI' - reorient_fs_T1.inputs.outputtype = 'NIFTI_GZ' + reorient_fs_T1 = pe.Node( + interface=afni.Resample(), + name=f"reorient_fs_T1_{node_id}", + mem_gb=0, + mem_x=(0.0115, "in_file", "t"), + ) + reorient_fs_T1.inputs.orientation = "RPI" + reorient_fs_T1.inputs.outputtype = "NIFTI_GZ" - wf.connect(convert_fs_T1_to_nifti, 'out_file', - reorient_fs_T1, 'in_file') + wf.connect(convert_fs_T1_to_nifti, "out_file", reorient_fs_T1, "in_file") # flirt -in head_fs.nii.gz -ref ${FSLDIR}/data/standard/MNI152_T1_1mm.nii.gz \ # -out tmp_head_fs2standard.nii.gz -omat tmp_head_fs2standard.mat -bins 256 -cost corratio \ # -searchrx -90 90 -searchry -90 90 -searchrz -90 90 -dof 12 -interp trilinear - convert_head_to_template = pe.Node(interface=fsl.FLIRT(), - name=f'convert_head_to_template_{node_id}') - convert_head_to_template.inputs.cost = 'corratio' - convert_head_to_template.inputs.interp = 'trilinear' + convert_head_to_template = pe.Node( + interface=fsl.FLIRT(), name=f"convert_head_to_template_{node_id}" + ) + convert_head_to_template.inputs.cost = "corratio" + convert_head_to_template.inputs.interp = "trilinear" convert_head_to_template.inputs.bins = 256 convert_head_to_template.inputs.dof = 12 convert_head_to_template.inputs.searchr_x = [-90, 90] convert_head_to_template.inputs.searchr_y = [-90, 90] convert_head_to_template.inputs.searchr_z = [-90, 90] - wf.connect(reorient_fs_T1, 'out_file', - convert_head_to_template, 'in_file') + wf.connect(reorient_fs_T1, "out_file", convert_head_to_template, "in_file") - node, out = strat_pool.get_data('T1w-ACPC-template') - wf.connect(node, out, convert_head_to_template, 'reference') + node, out = strat_pool.get_data("T1w-ACPC-template") + wf.connect(node, out, convert_head_to_template, "reference") # convert_xfm -omat tmp_standard2head_fs.mat -inverse tmp_head_fs2standard.mat - convert_xfm = pe.Node(interface=fsl_utils.ConvertXFM(), - name=f'convert_xfm_{node_id}') + convert_xfm = pe.Node( + interface=fsl_utils.ConvertXFM(), name=f"convert_xfm_{node_id}" + ) convert_xfm.inputs.invert_xfm = True - wf.connect(convert_head_to_template, 'out_matrix_file', - convert_xfm, 'in_file') + wf.connect(convert_head_to_template, "out_matrix_file", convert_xfm, "in_file") # bet tmp_head_fs2standard.nii.gz tmp.nii.gz -f ${bet_thr_tight} -m - skullstrip = pe.Node(interface=fsl.BET(), - name=f'anat_BET_skullstrip_{node_id}') - skullstrip.inputs.output_type = 'NIFTI_GZ' - skullstrip.inputs.mask=True - - if opt == 'FreeSurfer-BET-Tight': - skullstrip.inputs.frac=0.3 - elif opt == 'FreeSurfer-BET-Loose': - skullstrip.inputs.frac=0.1 - - wf.connect(convert_head_to_template, 'out_file', - skullstrip, 'in_file') - + skullstrip = pe.Node(interface=fsl.BET(), name=f"anat_BET_skullstrip_{node_id}") + skullstrip.inputs.output_type = "NIFTI_GZ" + skullstrip.inputs.mask = True + + if opt == "FreeSurfer-BET-Tight": + skullstrip.inputs.frac = 0.3 + elif opt == "FreeSurfer-BET-Loose": + skullstrip.inputs.frac = 0.1 + + wf.connect(convert_head_to_template, "out_file", skullstrip, "in_file") + # fslmaths tmp_mask.nii.gz -mas 
${CCSDIR}/templates/MNI152_T1_1mm_first_brain_mask.nii.gz tmp_mask.nii.gz - apply_mask = pe.Node(interface=fsl.maths.ApplyMask(), - name=f'apply_mask_{node_id}') + apply_mask = pe.Node(interface=fsl.maths.ApplyMask(), name=f"apply_mask_{node_id}") - wf.connect(skullstrip, 'out_file', - apply_mask, 'in_file') + wf.connect(skullstrip, "out_file", apply_mask, "in_file") - node, out = strat_pool.get_data('T1w-brain-template-mask-ccs') - wf.connect(node, out, apply_mask, 'mask_file') + node, out = strat_pool.get_data("T1w-brain-template-mask-ccs") + wf.connect(node, out, apply_mask, "mask_file") # flirt -in tmp_mask.nii.gz -applyxfm -init tmp_standard2head_fs.mat -out brain_fsl_mask_tight.nii.gz \ # -paddingsize 0.0 -interp nearestneighbour -ref head_fs.nii.gz - convert_template_mask_to_native = pe.Node(interface=fsl.FLIRT(), - name=f'convert_template_mask_to_native_{node_id}') + convert_template_mask_to_native = pe.Node( + interface=fsl.FLIRT(), name=f"convert_template_mask_to_native_{node_id}" + ) convert_template_mask_to_native.inputs.apply_xfm = True convert_template_mask_to_native.inputs.padding_size = 0 - convert_template_mask_to_native.inputs.interp = 'nearestneighbour' + convert_template_mask_to_native.inputs.interp = "nearestneighbour" - wf.connect(apply_mask, 'out_file', - convert_template_mask_to_native, 'in_file') + wf.connect(apply_mask, "out_file", convert_template_mask_to_native, "in_file") - wf.connect(convert_xfm, 'out_file', - convert_template_mask_to_native, 'in_matrix_file') + wf.connect( + convert_xfm, "out_file", convert_template_mask_to_native, "in_matrix_file" + ) - wf.connect(reorient_fs_T1, 'out_file', - convert_template_mask_to_native, 'reference') + wf.connect(reorient_fs_T1, "out_file", convert_template_mask_to_native, "reference") # fslmaths brain_fs_mask.nii.gz -add brain_fsl_mask_tight.nii.gz -bin brain_mask_tight.nii.gz - # BinaryMaths doesn't use -bin! - combine_mask = pe.Node(interface=fsl.BinaryMaths(), - name=f'combine_mask_{node_id}') + # BinaryMaths doesn't use -bin! 
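+    # fsl.BinaryMaths maps to `fslmaths <in_file> -<operation> <operand_file> <out_file>`,
+    # so the trailing `-bin` from the shell line above is applied afterwards by the
+    # separate binarize_combined_mask node.
+    # FreeSurfer-BET-Tight combines the FreeSurfer and FSL masks with -add (union);
+    # FreeSurfer-BET-Loose combines them with -mul (intersection).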
+ combine_mask = pe.Node(interface=fsl.BinaryMaths(), name=f"combine_mask_{node_id}") - if opt == 'FreeSurfer-BET-Tight': - combine_mask.inputs.operation = 'add' - elif opt == 'FreeSurfer-BET-Loose': - combine_mask.inputs.operation = 'mul' + if opt == "FreeSurfer-BET-Tight": + combine_mask.inputs.operation = "add" + elif opt == "FreeSurfer-BET-Loose": + combine_mask.inputs.operation = "mul" - wf.connect(binarize_fs_brain, 'out_file', - combine_mask, 'in_file') + wf.connect(binarize_fs_brain, "out_file", combine_mask, "in_file") - wf.connect(convert_template_mask_to_native, 'out_file', - combine_mask, 'operand_file') + wf.connect( + convert_template_mask_to_native, "out_file", combine_mask, "operand_file" + ) - binarize_combined_mask = pe.Node(interface=fsl.maths.MathsCommand(), - name=f'binarize_combined_mask_{node_id}') - binarize_combined_mask.inputs.args = '-bin' + binarize_combined_mask = pe.Node( + interface=fsl.maths.MathsCommand(), name=f"binarize_combined_mask_{node_id}" + ) + binarize_combined_mask.inputs.args = "-bin" - wf.connect(combine_mask, 'out_file', - binarize_combined_mask, 'in_file') + wf.connect(combine_mask, "out_file", binarize_combined_mask, "in_file") # CCS brain mask is in FS space, transfer it back to native T1 space - fs_fsl_brain_mask_to_native = pe.Node(interface=freesurfer.ApplyVolTransform(), - name=f'fs_fsl_brain_mask_to_native_{node_id}') + fs_fsl_brain_mask_to_native = pe.Node( + interface=freesurfer.ApplyVolTransform(), + name=f"fs_fsl_brain_mask_to_native_{node_id}", + ) fs_fsl_brain_mask_to_native.inputs.reg_header = True - fs_fsl_brain_mask_to_native.inputs.interp = 'nearest' + fs_fsl_brain_mask_to_native.inputs.interp = "nearest" - wf.connect(binarize_combined_mask, 'out_file', - fs_fsl_brain_mask_to_native, 'source_file') + wf.connect( + binarize_combined_mask, "out_file", fs_fsl_brain_mask_to_native, "source_file" + ) - node, out = strat_pool.get_data('pipeline-fs_raw-average') - wf.connect(node, out, fs_fsl_brain_mask_to_native, 'target_file') + node, out = strat_pool.get_data("pipeline-fs_raw-average") + wf.connect(node, out, fs_fsl_brain_mask_to_native, "target_file") - node, out = strat_pool.get_data('freesurfer-subject-dir') - wf.connect(node, out, fs_fsl_brain_mask_to_native, 'subjects_dir') + node, out = strat_pool.get_data("freesurfer-subject-dir") + wf.connect(node, out, fs_fsl_brain_mask_to_native, "subjects_dir") - if opt == 'FreeSurfer-BET-Tight': + if opt == "FreeSurfer-BET-Tight": outputs = { - 'space-T1w_desc-tight_brain_mask': (fs_fsl_brain_mask_to_native, 'transformed_file') + "space-T1w_desc-tight_brain_mask": ( + fs_fsl_brain_mask_to_native, + "transformed_file", + ) } - elif opt == 'FreeSurfer-BET-Loose': + elif opt == "FreeSurfer-BET-Loose": outputs = { - 'space-T1w_desc-loose_brain_mask': (fs_fsl_brain_mask_to_native, 'transformed_file') + "space-T1w_desc-loose_brain_mask": ( + fs_fsl_brain_mask_to_native, + "transformed_file", + ) } return (wf, outputs) -def mask_T2(wf_name='mask_T2'): +def mask_T2(wf_name="mask_T2"): # create T2 mask based on T1 mask # reference https://github.com/DCAN-Labs/dcan-macaque-pipeline/blob/master/PreliminaryMasking/macaque_masking.py - + preproc = pe.Workflow(name=wf_name) - inputnode = pe.Node(util.IdentityInterface(fields=['T1w', - 'T1w_mask', - 'T2w']), - name='inputspec') + inputnode = pe.Node( + util.IdentityInterface(fields=["T1w", "T1w_mask", "T2w"]), name="inputspec" + ) - outputnode = pe.Node(util.IdentityInterface(fields=['T1w_brain', - 'T2w_mask', - 'T2w_brain']), - name='outputspec') + 
outputnode = pe.Node( + util.IdentityInterface(fields=["T1w_brain", "T2w_mask", "T2w_brain"]), + name="outputspec", + ) # mask_t1w = 'fslmaths {t1w} -mas {t1w_mask_edit} {t1w_brain}'.format(**kwargs) - mask_t1w = pe.Node(interface=fsl.MultiImageMaths(), - name='mask_t1w') + mask_t1w = pe.Node(interface=fsl.MultiImageMaths(), name="mask_t1w") mask_t1w.inputs.op_string = "-mas %s " - preproc.connect(inputnode, 'T1w', mask_t1w, 'in_file') - preproc.connect(inputnode, 'T1w_mask', mask_t1w, 'operand_files') - + preproc.connect(inputnode, "T1w", mask_t1w, "in_file") + preproc.connect(inputnode, "T1w_mask", mask_t1w, "operand_files") # t1w2t2w_rigid = 'flirt -dof 6 -cost mutualinfo -in {t1w} -ref {t2w} ' \ # '-omat {t1w2t2w}'.format(**kwargs) - t1w2t2w_rigid = pe.Node(interface=fsl.FLIRT(), - name='t1w2t2w_rigid') + t1w2t2w_rigid = pe.Node(interface=fsl.FLIRT(), name="t1w2t2w_rigid") t1w2t2w_rigid.inputs.dof = 6 - t1w2t2w_rigid.inputs.cost = 'mutualinfo' - preproc.connect(inputnode, 'T1w', t1w2t2w_rigid, 'in_file') - preproc.connect(inputnode, 'T2w', t1w2t2w_rigid, 'reference') + t1w2t2w_rigid.inputs.cost = "mutualinfo" + preproc.connect(inputnode, "T1w", t1w2t2w_rigid, "in_file") + preproc.connect(inputnode, "T2w", t1w2t2w_rigid, "reference") # t1w2t2w_mask = 'flirt -in {t1w_mask_edit} -interp nearestneighbour -ref {' \ # 't2w} -o {t2w_brain_mask} -applyxfm -init {' \ # 't1w2t2w}'.format(**kwargs) - t1w2t2w_mask = pe.Node(interface=fsl.FLIRT(), - name='t1w2t2w_mask') + t1w2t2w_mask = pe.Node(interface=fsl.FLIRT(), name="t1w2t2w_mask") t1w2t2w_mask.inputs.apply_xfm = True - t1w2t2w_mask.inputs.interp = 'nearestneighbour' + t1w2t2w_mask.inputs.interp = "nearestneighbour" - preproc.connect(inputnode, 'T1w_mask', t1w2t2w_mask, 'in_file') - preproc.connect(inputnode, 'T2w', t1w2t2w_mask, 'reference') - preproc.connect(t1w2t2w_rigid, 'out_matrix_file', t1w2t2w_mask, 'in_matrix_file') + preproc.connect(inputnode, "T1w_mask", t1w2t2w_mask, "in_file") + preproc.connect(inputnode, "T2w", t1w2t2w_mask, "reference") + preproc.connect(t1w2t2w_rigid, "out_matrix_file", t1w2t2w_mask, "in_matrix_file") # mask_t2w = 'fslmaths {t2w} -mas {t2w_brain_mask} ' \ # '{t2w_brain}'.format(**kwargs) - mask_t2w = pe.Node(interface=fsl.MultiImageMaths(), - name='mask_t2w') + mask_t2w = pe.Node(interface=fsl.MultiImageMaths(), name="mask_t2w") mask_t2w.inputs.op_string = "-mas %s " - preproc.connect(inputnode, 'T2w', mask_t2w, 'in_file') - preproc.connect(t1w2t2w_mask, 'out_file', mask_t2w, 'operand_files') + preproc.connect(inputnode, "T2w", mask_t2w, "in_file") + preproc.connect(t1w2t2w_mask, "out_file", mask_t2w, "operand_files") - preproc.connect(mask_t1w, 'out_file', outputnode, 'T1w_brain') - preproc.connect(mask_t2w, 'out_file', outputnode, 'T2w_brain') - preproc.connect(t1w2t2w_mask, 'out_file', outputnode, 'T2w_mask') + preproc.connect(mask_t1w, "out_file", outputnode, "T1w_brain") + preproc.connect(mask_t2w, "out_file", outputnode, "T2w_brain") + preproc.connect(t1w2t2w_mask, "out_file", outputnode, "T2w_mask") return preproc @@ -1271,29 +1449,32 @@ def mask_T2(wf_name='mask_T2'): outputs=["desc-preproc_T1w", "desc-reorient_T1w", "desc-head_T1w"], ) def anatomical_init(wf, cfg, strat_pool, pipe_num, opt=None): - - anat_deoblique = pe.Node(interface=afni.Refit(), - name=f'anat_deoblique_{pipe_num}') + anat_deoblique = pe.Node(interface=afni.Refit(), name=f"anat_deoblique_{pipe_num}") anat_deoblique.inputs.deoblique = True - node, out = strat_pool.get_data('T1w') - wf.connect(node, out, anat_deoblique, 'in_file') + 
node, out = strat_pool.get_data("T1w") + wf.connect(node, out, anat_deoblique, "in_file") - anat_reorient = pe.Node(interface=afni.Resample(), - name=f'anat_reorient_{pipe_num}', - mem_gb=0, - mem_x=(0.0115, 'in_file', 't')) - anat_reorient.inputs.orientation = 'RPI' - anat_reorient.inputs.outputtype = 'NIFTI_GZ' + anat_reorient = pe.Node( + interface=afni.Resample(), + name=f"anat_reorient_{pipe_num}", + mem_gb=0, + mem_x=(0.0115, "in_file", "t"), + ) + anat_reorient.inputs.orientation = "RPI" + anat_reorient.inputs.outputtype = "NIFTI_GZ" - wf.connect(anat_deoblique, 'out_file', anat_reorient, 'in_file') + wf.connect(anat_deoblique, "out_file", anat_reorient, "in_file") - outputs = {'desc-preproc_T1w': (anat_reorient, 'out_file'), - 'desc-reorient_T1w': (anat_reorient, 'out_file'), - 'desc-head_T1w': (anat_reorient, 'out_file')} + outputs = { + "desc-preproc_T1w": (anat_reorient, "out_file"), + "desc-reorient_T1w": (anat_reorient, "out_file"), + "desc-head_T1w": (anat_reorient, "out_file"), + } return (wf, outputs) + @nodeblock( name="acpc_alignment_head", switch=[ @@ -1308,25 +1489,26 @@ def anatomical_init(wf, cfg, strat_pool, pipe_num, opt=None): ], ) def acpc_align_head(wf, cfg, strat_pool, pipe_num, opt=None): + acpc_align = acpc_alignment( + config=cfg, + acpc_target=cfg.anatomical_preproc["acpc_alignment"]["acpc_target"], + mask=False, + wf_name=f"acpc_align_{pipe_num}", + ) - acpc_align = acpc_alignment(config=cfg, - acpc_target=cfg.anatomical_preproc[ - 'acpc_alignment']['acpc_target'], - mask=False, - wf_name=f'acpc_align_{pipe_num}') - - node, out = strat_pool.get_data(['desc-preproc_T1w','desc-head_T1w']) - wf.connect(node, out, acpc_align, 'inputspec.anat_leaf') + node, out = strat_pool.get_data(["desc-preproc_T1w", "desc-head_T1w"]) + wf.connect(node, out, acpc_align, "inputspec.anat_leaf") - node, out = strat_pool.get_data('T1w-ACPC-template') - wf.connect(node, out, acpc_align, 'inputspec.template_head_for_acpc') + node, out = strat_pool.get_data("T1w-ACPC-template") + wf.connect(node, out, acpc_align, "inputspec.template_head_for_acpc") outputs = { - 'desc-head_T1w': (acpc_align, 'outputspec.acpc_aligned_head'), - 'desc-preproc_T1w': (acpc_align, 'outputspec.acpc_aligned_head'), - 'from-T1w_to-ACPC_mode-image_desc-aff2rig_xfm': ( + "desc-head_T1w": (acpc_align, "outputspec.acpc_aligned_head"), + "desc-preproc_T1w": (acpc_align, "outputspec.acpc_aligned_head"), + "from-T1w_to-ACPC_mode-image_desc-aff2rig_xfm": ( acpc_align, - 'outputspec.from-T1w_to-ACPC_mode-image_desc-aff2rig_xfm') + "outputspec.from-T1w_to-ACPC_mode-image_desc-aff2rig_xfm", + ), } return (wf, outputs) @@ -1355,33 +1537,34 @@ def acpc_align_head(wf, cfg, strat_pool, pipe_num, opt=None): ], ) def acpc_align_head_with_mask(wf, cfg, strat_pool, pipe_num, opt=None): + acpc_align = acpc_alignment( + config=cfg, + acpc_target=cfg.anatomical_preproc["acpc_alignment"]["acpc_target"], + mask=True, + wf_name=f"acpc_align_{pipe_num}", + ) - acpc_align = acpc_alignment(config=cfg, - acpc_target=cfg.anatomical_preproc[ - 'acpc_alignment']['acpc_target'], - mask=True, - wf_name=f'acpc_align_{pipe_num}') - - node, out = strat_pool.get_data(['desc-head_T1w', 'desc-preproc_T1w']) - wf.connect(node, out, acpc_align, 'inputspec.anat_leaf') + node, out = strat_pool.get_data(["desc-head_T1w", "desc-preproc_T1w"]) + wf.connect(node, out, acpc_align, "inputspec.anat_leaf") - node, out = strat_pool.get_data('T1w-ACPC-template') - wf.connect(node, out, acpc_align, 'inputspec.template_head_for_acpc') + node, out = 
strat_pool.get_data("T1w-ACPC-template") + wf.connect(node, out, acpc_align, "inputspec.template_head_for_acpc") if strat_pool.check_rpool("space-T1w_desc-brain_mask"): node, out = strat_pool.get_data("space-T1w_desc-brain_mask") - wf.connect(node, out, acpc_align, 'inputspec.brain_mask') + wf.connect(node, out, acpc_align, "inputspec.brain_mask") - node, out = strat_pool.get_data('T1w-brain-ACPC-template') - wf.connect(node, out, acpc_align, 'inputspec.template_brain_for_acpc') + node, out = strat_pool.get_data("T1w-brain-ACPC-template") + wf.connect(node, out, acpc_align, "inputspec.template_brain_for_acpc") outputs = { - 'desc-head_T1w': (acpc_align, 'outputspec.acpc_aligned_head'), - 'desc-preproc_T1w': (acpc_align, 'outputspec.acpc_aligned_head'), - 'space-T1w_desc-brain_mask': ( - acpc_align, 'outputspec.acpc_brain_mask'), - 'from-T1w_to-ACPC_mode-image_desc-aff2rig_xfm': ( - acpc_align, 'outputspec.from-T1w_to-ACPC_mode-image_desc-aff2rig_xfm') + "desc-head_T1w": (acpc_align, "outputspec.acpc_aligned_head"), + "desc-preproc_T1w": (acpc_align, "outputspec.acpc_aligned_head"), + "space-T1w_desc-brain_mask": (acpc_align, "outputspec.acpc_brain_mask"), + "from-T1w_to-ACPC_mode-image_desc-aff2rig_xfm": ( + acpc_align, + "outputspec.from-T1w_to-ACPC_mode-image_desc-aff2rig_xfm", + ), } return (wf, outputs) @@ -1408,30 +1591,32 @@ def acpc_align_head_with_mask(wf, cfg, strat_pool, pipe_num, opt=None): ], ) def acpc_align_brain(wf, cfg, strat_pool, pipe_num, opt=None): + acpc_align = acpc_alignment( + config=cfg, + acpc_target=cfg.anatomical_preproc["acpc_alignment"]["acpc_target"], + mask=False, + wf_name=f"acpc_align_{pipe_num}", + ) - acpc_align = acpc_alignment(config=cfg, - acpc_target=cfg.anatomical_preproc[ - 'acpc_alignment']['acpc_target'], - mask=False, - wf_name=f'acpc_align_{pipe_num}') - - node, out = strat_pool.get_data('desc-preproc_T1w') - wf.connect(node, out, acpc_align, 'inputspec.anat_leaf') + node, out = strat_pool.get_data("desc-preproc_T1w") + wf.connect(node, out, acpc_align, "inputspec.anat_leaf") - node, out = strat_pool.get_data('desc-tempbrain_T1w') - wf.connect(node, out, acpc_align, 'inputspec.anat_brain') + node, out = strat_pool.get_data("desc-tempbrain_T1w") + wf.connect(node, out, acpc_align, "inputspec.anat_brain") - node, out = strat_pool.get_data('T1w-ACPC-template') - wf.connect(node, out, acpc_align, 'inputspec.template_head_for_acpc') + node, out = strat_pool.get_data("T1w-ACPC-template") + wf.connect(node, out, acpc_align, "inputspec.template_head_for_acpc") - node, out = strat_pool.get_data('T1w-brain-ACPC-template') - wf.connect(node, out, acpc_align, 'inputspec.template_brain_for_acpc') + node, out = strat_pool.get_data("T1w-brain-ACPC-template") + wf.connect(node, out, acpc_align, "inputspec.template_brain_for_acpc") outputs = { - 'desc-preproc_T1w': (acpc_align, 'outputspec.acpc_aligned_head'), - 'desc-acpcbrain_T1w': (acpc_align, 'outputspec.acpc_aligned_brain'), - 'from-T1w_to-ACPC_mode-image_desc-aff2rig_xfm': ( - acpc_align, 'outputspec.from-T1w_to-ACPC_mode-image_desc-aff2rig_xfm') + "desc-preproc_T1w": (acpc_align, "outputspec.acpc_aligned_head"), + "desc-acpcbrain_T1w": (acpc_align, "outputspec.acpc_aligned_brain"), + "from-T1w_to-ACPC_mode-image_desc-aff2rig_xfm": ( + acpc_align, + "outputspec.from-T1w_to-ACPC_mode-image_desc-aff2rig_xfm", + ), } return (wf, outputs) @@ -1456,33 +1641,35 @@ def acpc_align_brain(wf, cfg, strat_pool, pipe_num, opt=None): ], ) def acpc_align_brain_with_mask(wf, cfg, strat_pool, pipe_num, opt=None): + 
acpc_align = acpc_alignment( + config=cfg, + acpc_target=cfg.anatomical_preproc["acpc_alignment"]["acpc_target"], + mask=True, + wf_name=f"acpc_align_{pipe_num}", + ) - acpc_align = acpc_alignment(config=cfg, - acpc_target=cfg.anatomical_preproc[ - 'acpc_alignment']['acpc_target'], - mask=True, - wf_name=f'acpc_align_{pipe_num}') - - node, out = strat_pool.get_data('desc-preproc_T1w') - wf.connect(node, out, acpc_align, 'inputspec.anat_leaf') + node, out = strat_pool.get_data("desc-preproc_T1w") + wf.connect(node, out, acpc_align, "inputspec.anat_leaf") - node, out = strat_pool.get_data('desc-tempbrain_T1w') - wf.connect(node, out, acpc_align, 'inputspec.anat_brain') + node, out = strat_pool.get_data("desc-tempbrain_T1w") + wf.connect(node, out, acpc_align, "inputspec.anat_brain") node, out = strat_pool.get_data("space-T1w_desc-brain_mask") - wf.connect(node, out, acpc_align, 'inputspec.brain_mask') + wf.connect(node, out, acpc_align, "inputspec.brain_mask") - node, out = strat_pool.get_data('T1w-ACPC-template') - wf.connect(node, out, acpc_align, 'inputspec.template_head_for_acpc') + node, out = strat_pool.get_data("T1w-ACPC-template") + wf.connect(node, out, acpc_align, "inputspec.template_head_for_acpc") - node, out = strat_pool.get_data('T1w-brain-ACPC-template') - wf.connect(node, out, acpc_align, 'inputspec.template_brain_for_acpc') + node, out = strat_pool.get_data("T1w-brain-ACPC-template") + wf.connect(node, out, acpc_align, "inputspec.template_brain_for_acpc") outputs = { - 'desc-preproc_T1w': (acpc_align, 'outputspec.acpc_aligned_head'), - 'desc-acpcbrain_T1w': (acpc_align, 'outputspec.acpc_aligned_brain'), - 'space-T1w_desc-brain_mask': (acpc_align, 'outputspec.acpc_brain_mask'), - 'space-T1w_desc-prebrain_mask': (strat_pool.get_data('space-T1_desc-brain_mask')) + "desc-preproc_T1w": (acpc_align, "outputspec.acpc_aligned_head"), + "desc-acpcbrain_T1w": (acpc_align, "outputspec.acpc_aligned_brain"), + "space-T1w_desc-brain_mask": (acpc_align, "outputspec.acpc_brain_mask"), + "space-T1w_desc-prebrain_mask": ( + strat_pool.get_data("space-T1_desc-brain_mask") + ), } return (wf, outputs) @@ -1503,24 +1690,21 @@ def acpc_align_brain_with_mask(wf, cfg, strat_pool, pipe_num, opt=None): outputs=["desc-preproc_T2w"], ) def registration_T2w_to_T1w(wf, cfg, strat_pool, pipe_num, opt=None): + T2_to_T1_reg = T2wToT1wReg(wf_name=f"T2w_to_T1w_Reg_{pipe_num}") - T2_to_T1_reg = T2wToT1wReg(wf_name=f'T2w_to_T1w_Reg_{pipe_num}') + node, out = strat_pool.get_data("desc-preproc_T1w") + wf.connect(node, out, T2_to_T1_reg, "inputspec.T1w") - node, out = strat_pool.get_data('desc-preproc_T1w') - wf.connect(node, out, T2_to_T1_reg, 'inputspec.T1w') + node, out = strat_pool.get_data("desc-preproc_T2w") + wf.connect(node, out, T2_to_T1_reg, "inputspec.T2w") - node, out = strat_pool.get_data('desc-preproc_T2w') - wf.connect(node, out, T2_to_T1_reg, 'inputspec.T2w') + node, out = strat_pool.get_data(["desc-acpcbrain_T1w"]) + wf.connect(node, out, T2_to_T1_reg, "inputspec.T1w_brain") - node, out = strat_pool.get_data(['desc-acpcbrain_T1w']) - wf.connect(node, out, T2_to_T1_reg, 'inputspec.T1w_brain') + node, out = strat_pool.get_data(["desc-acpcbrain_T2w"]) + wf.connect(node, out, T2_to_T1_reg, "inputspec.T2w_brain") - node, out = strat_pool.get_data(['desc-acpcbrain_T2w']) - wf.connect(node, out, T2_to_T1_reg, 'inputspec.T2w_brain') - - outputs = { - 'desc-preproc_T2w': (T2_to_T1_reg, 'outputspec.T2w_to_T1w') - } + outputs = {"desc-preproc_T2w": (T2_to_T1_reg, "outputspec.T2w_to_T1w")} return (wf, outputs) 
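The connectors and nodeblocks reformatted above and below all share one contract: fetch
named resources with strat_pool.get_data, wire them into nipype nodes with wf.connect,
and return an outputs dict mapping resource keys to (node, output_name) pairs, which the
engine then records back into the resource pool. A minimal sketch of that contract, for
orientation only and not part of the patch: the block name my_step and its identity
passthrough node are invented for illustration, and C-PAC's nipype_pipeline_engine
wrapper is assumed to be importable as below.

    import nipype.interfaces.utility as util

    from CPAC.pipeline import nipype_pipeline_engine as pe


    def my_step(wf, cfg, strat_pool, pipe_num, opt=None):
        # Fetch the current preprocessed T1w resource from the strategy pool.
        node, out = strat_pool.get_data("desc-preproc_T1w")
        # Stand-in for real processing: an identity passthrough node.
        passthrough = pe.Node(
            util.IdentityInterface(fields=["in_file"]),
            name=f"my_step_{pipe_num}",
        )
        wf.connect(node, out, passthrough, "in_file")
        # Republish the result so downstream nodeblocks can request it by key.
        outputs = {"desc-preproc_T1w": (passthrough, "in_file")}
        return (wf, outputs)

Returning the dict instead of writing into the pool directly is what keeps each block
below a self-contained unit that the engine can rewire per strategy.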
@@ -1535,18 +1719,16 @@ def registration_T2w_to_T1w(wf, cfg, strat_pool, pipe_num, opt=None): outputs=["desc-preproc_T1w"], ) def non_local_means(wf, cfg, strat_pool, pipe_num, opt=None): + denoise = pe.Node(interface=ants.DenoiseImage(), name=f"anat_denoise_{pipe_num}") - denoise = pe.Node(interface=ants.DenoiseImage(), - name=f'anat_denoise_{pipe_num}') - - denoise.inputs.noise_model = cfg.anatomical_preproc['non_local_means_filtering']['noise_model'] + denoise.inputs.noise_model = cfg.anatomical_preproc["non_local_means_filtering"][ + "noise_model" + ] - node, out = strat_pool.get_data('desc-preproc_T1w') - wf.connect(node, out, denoise, 'input_image') + node, out = strat_pool.get_data("desc-preproc_T1w") + wf.connect(node, out, denoise, "input_image") - outputs = { - 'desc-preproc_T1w': (denoise, 'output_image') - } + outputs = {"desc-preproc_T1w": (denoise, "output_image")} return (wf, outputs) @@ -1568,18 +1750,20 @@ def non_local_means(wf, cfg, strat_pool, pipe_num, opt=None): }, ) def n4_bias_correction(wf, cfg, strat_pool, pipe_num, opt=None): + n4 = pe.Node( + interface=ants.N4BiasFieldCorrection(dimension=3, copy_header=True), + name=f"anat_n4_{pipe_num}", + ) + n4.inputs.shrink_factor = cfg.anatomical_preproc["n4_bias_field_correction"][ + "shrink_factor" + ] - n4 = pe.Node(interface=ants.N4BiasFieldCorrection(dimension=3, - copy_header=True), - name=f'anat_n4_{pipe_num}') - n4.inputs.shrink_factor = cfg.anatomical_preproc['n4_bias_field_correction']['shrink_factor'] - - node, out = strat_pool.get_data('desc-preproc_T1w') - wf.connect(node, out, n4, 'input_image') + node, out = strat_pool.get_data("desc-preproc_T1w") + wf.connect(node, out, n4, "input_image") outputs = { - 'desc-preproc_T1w': (n4, 'output_image'), - 'desc-n4_T1w': (n4, 'output_image') + "desc-preproc_T1w": (n4, "output_image"), + "desc-n4_T1w": (n4, "output_image"), } return (wf, outputs) @@ -1599,24 +1783,25 @@ def n4_bias_correction(wf, cfg, strat_pool, pipe_num, opt=None): ], ) def t1t2_bias_correction(wf, cfg, strat_pool, pipe_num, opt=None): + t1t2_bias_correction = BiasFieldCorrection_sqrtT1wXT1w( + config=cfg, wf_name=f"t1t2_bias_correction_{pipe_num}" + ) - t1t2_bias_correction = BiasFieldCorrection_sqrtT1wXT1w(config=cfg, wf_name=f't1t2_bias_correction_{pipe_num}') - - node, out = strat_pool.get_data('desc-preproc_T1w') - wf.connect(node, out, t1t2_bias_correction, 'inputspec.T1w') + node, out = strat_pool.get_data("desc-preproc_T1w") + wf.connect(node, out, t1t2_bias_correction, "inputspec.T1w") - node, out = strat_pool.get_data('desc-preproc_T2w') - wf.connect(node, out, t1t2_bias_correction, 'inputspec.T2w') + node, out = strat_pool.get_data("desc-preproc_T2w") + wf.connect(node, out, t1t2_bias_correction, "inputspec.T2w") node, out = strat_pool.get_data("desc-acpcbrain_T1w") - wf.connect(node, out, t1t2_bias_correction, 'inputspec.T1w_brain') + wf.connect(node, out, t1t2_bias_correction, "inputspec.T1w_brain") outputs = { - 'desc-preproc_T1w': (t1t2_bias_correction, 'outputspec.T1w_biascorrected'), - 'desc-brain_T1w': (t1t2_bias_correction, 'outputspec.T1w_brain_biascorrected'), - 'desc-preproc_T2w': (t1t2_bias_correction, 'outputspec.T2w_biascorrected'), - 'desc-brain_T2w': (t1t2_bias_correction, 'outputspec.T2w_brain_biascorrected'), - 'desc-biasfield_T1wT2w': (t1t2_bias_correction, 'outputspec.biasfield'), + "desc-preproc_T1w": (t1t2_bias_correction, "outputspec.T1w_biascorrected"), + "desc-brain_T1w": (t1t2_bias_correction, "outputspec.T1w_brain_biascorrected"), + "desc-preproc_T2w": 
(t1t2_bias_correction, "outputspec.T2w_biascorrected"), + "desc-brain_T2w": (t1t2_bias_correction, "outputspec.T2w_brain_biascorrected"), + "desc-biasfield_T1wT2w": (t1t2_bias_correction, "outputspec.biasfield"), } return (wf, outputs) @@ -1634,7 +1819,6 @@ def t1t2_bias_correction(wf, cfg, strat_pool, pipe_num, opt=None): outputs=["space-T1w_desc-brain_mask"], ) def brain_mask_afni(wf, cfg, strat_pool, pipe_num, opt=None): - wf, outputs = afni_brain_connector(wf, cfg, strat_pool, pipe_num, opt) return (wf, outputs) @@ -1652,13 +1836,9 @@ def brain_mask_afni(wf, cfg, strat_pool, pipe_num, opt=None): outputs=["space-T1w_desc-acpcbrain_mask"], ) def brain_mask_acpc_afni(wf, cfg, strat_pool, pipe_num, opt=None): - wf, wf_outputs = afni_brain_connector(wf, cfg, strat_pool, pipe_num, opt) - outputs = { - 'space-T1w_desc-acpcbrain_mask': - wf_outputs['space-T1w_desc-brain_mask'] - } + outputs = {"space-T1w_desc-acpcbrain_mask": wf_outputs["space-T1w_desc-brain_mask"]} return (wf, outputs) @@ -1675,7 +1855,6 @@ def brain_mask_acpc_afni(wf, cfg, strat_pool, pipe_num, opt=None): outputs=["space-T1w_desc-brain_mask"], ) def brain_mask_fsl(wf, cfg, strat_pool, pipe_num, opt=None): - wf, outputs = fsl_brain_connector(wf, cfg, strat_pool, pipe_num, opt) return (wf, outputs) @@ -1693,13 +1872,9 @@ def brain_mask_fsl(wf, cfg, strat_pool, pipe_num, opt=None): outputs=["space-T1w_desc-acpcbrain_mask"], ) def brain_mask_acpc_fsl(wf, cfg, strat_pool, pipe_num, opt=None): - wf, wf_outputs = fsl_brain_connector(wf, cfg, strat_pool, pipe_num, opt) - outputs = { - 'space-T1w_desc-acpcbrain_mask': - wf_outputs['space-T1w_desc-brain_mask'] - } + outputs = {"space-T1w_desc-acpcbrain_mask": wf_outputs["space-T1w_desc-brain_mask"]} return (wf, outputs) @@ -1716,9 +1891,7 @@ def brain_mask_acpc_fsl(wf, cfg, strat_pool, pipe_num, opt=None): outputs=["space-T1w_desc-brain_mask", "desc-preproc_T1w"], ) def brain_mask_niworkflows_ants(wf, cfg, strat_pool, pipe_num, opt=None): - - wf, outputs = niworkflows_ants_brain_connector(wf, cfg, strat_pool, - pipe_num, opt) + wf, outputs = niworkflows_ants_brain_connector(wf, cfg, strat_pool, pipe_num, opt) return (wf, outputs) @@ -1735,15 +1908,13 @@ def brain_mask_niworkflows_ants(wf, cfg, strat_pool, pipe_num, opt=None): outputs=["space-T1w_desc-acpcbrain_mask", "desc-preproc_T1w"], ) def brain_mask_acpc_niworkflows_ants(wf, cfg, strat_pool, pipe_num, opt=None): - - wf, wf_outputs = niworkflows_ants_brain_connector(wf, cfg, strat_pool, - pipe_num, opt) + wf, wf_outputs = niworkflows_ants_brain_connector( + wf, cfg, strat_pool, pipe_num, opt + ) outputs = { - 'space-T1w_desc-acpcbrain_mask': - wf_outputs['space-T1w_desc-brain_mask'], - 'desc-preproc_T1w': - wf_outputs['desc-preproc_T1w'] + "space-T1w_desc-acpcbrain_mask": wf_outputs["space-T1w_desc-brain_mask"], + "desc-preproc_T1w": wf_outputs["desc-preproc_T1w"], } return (wf, outputs) @@ -1761,7 +1932,6 @@ def brain_mask_acpc_niworkflows_ants(wf, cfg, strat_pool, pipe_num, opt=None): outputs=["space-T1w_desc-brain_mask"], ) def brain_mask_unet(wf, cfg, strat_pool, pipe_num, opt=None): - wf, outputs = unet_brain_connector(wf, cfg, strat_pool, pipe_num, opt) return (wf, outputs) @@ -1779,13 +1949,9 @@ def brain_mask_unet(wf, cfg, strat_pool, pipe_num, opt=None): outputs=["space-T1w_desc-acpcbrain_mask"], ) def brain_mask_acpc_unet(wf, cfg, strat_pool, pipe_num, opt=None): - wf, wf_outputs = unet_brain_connector(wf, cfg, strat_pool, pipe_num, opt) - outputs = { - 'space-T1w_desc-acpcbrain_mask': - 
wf_outputs['space-T1w_desc-brain_mask'] - } + outputs = {"space-T1w_desc-acpcbrain_mask": wf_outputs["space-T1w_desc-brain_mask"]} return (wf, outputs) @@ -1806,9 +1972,7 @@ def brain_mask_acpc_unet(wf, cfg, strat_pool, pipe_num, opt=None): outputs=["space-T1w_desc-brain_mask"], ) def brain_mask_freesurfer(wf, cfg, strat_pool, pipe_num, opt=None): - - wf, outputs = freesurfer_brain_connector(wf, cfg, strat_pool, pipe_num, - opt) + wf, outputs = freesurfer_brain_connector(wf, cfg, strat_pool, pipe_num, opt) return (wf, outputs) @@ -1829,12 +1993,9 @@ def brain_mask_freesurfer(wf, cfg, strat_pool, pipe_num, opt=None): outputs=["space-T1w_desc-acpcbrain_mask"], ) def brain_mask_acpc_freesurfer(wf, cfg, strat_pool, pipe_num, opt=None): + wf, wf_outputs = freesurfer_brain_connector(wf, cfg, strat_pool, pipe_num, opt) - wf, wf_outputs = freesurfer_brain_connector(wf, cfg, strat_pool, pipe_num, - opt) - - outputs = {'space-T1w_desc-acpcbrain_mask': - wf_outputs['space-T1w_desc-brain_mask']} + outputs = {"space-T1w_desc-acpcbrain_mask": wf_outputs["space-T1w_desc-brain_mask"]} return (wf, outputs) @@ -1857,9 +2018,7 @@ def brain_mask_acpc_freesurfer(wf, cfg, strat_pool, pipe_num, opt=None): outputs=["space-T1w_desc-brain_mask"], ) def brain_mask_freesurfer_abcd(wf, cfg, strat_pool, pipe_num, opt=None): - - wf, outputs = freesurfer_abcd_brain_connector(wf, cfg, strat_pool, - pipe_num, opt) + wf, outputs = freesurfer_abcd_brain_connector(wf, cfg, strat_pool, pipe_num, opt) return (wf, outputs) @@ -1883,7 +2042,6 @@ def brain_mask_freesurfer_abcd(wf, cfg, strat_pool, pipe_num, opt=None): outputs=["space-T1w_desc-tight_brain_mask"], ) def brain_mask_freesurfer_fsl_tight(wf, cfg, strat_pool, pipe_num, opt=None): - wf, outputs = freesurfer_fsl_brain_connector(wf, cfg, strat_pool, pipe_num, opt) return (wf, outputs) @@ -1907,11 +2065,9 @@ def brain_mask_freesurfer_fsl_tight(wf, cfg, strat_pool, pipe_num, opt=None): outputs=["space-T1w_desc-acpcbrain_mask"], ) def brain_mask_acpc_freesurfer_abcd(wf, cfg, strat_pool, pipe_num, opt=None): - wf, wf_outputs = freesurfer_abcd_brain_connector(wf, cfg, strat_pool, pipe_num, opt) - outputs = {'space-T1w_desc-acpcbrain_mask': - wf_outputs['space-T1w_desc-brain_mask']} + outputs = {"space-T1w_desc-acpcbrain_mask": wf_outputs["space-T1w_desc-brain_mask"]} return (wf, outputs) @@ -1935,7 +2091,6 @@ def brain_mask_acpc_freesurfer_abcd(wf, cfg, strat_pool, pipe_num, opt=None): outputs=["space-T1w_desc-loose_brain_mask"], ) def brain_mask_freesurfer_fsl_loose(wf, cfg, strat_pool, pipe_num, opt=None): - wf, outputs = freesurfer_fsl_brain_connector(wf, cfg, strat_pool, pipe_num, opt) return (wf, outputs) @@ -1958,12 +2113,13 @@ def brain_mask_freesurfer_fsl_loose(wf, cfg, strat_pool, pipe_num, opt=None): outputs=["space-T1w_desc-tight_acpcbrain_mask"], ) def brain_mask_acpc_freesurfer_fsl_tight(wf, cfg, strat_pool, pipe_num, opt=None): + wf, wf_outputs = freesurfer_fsl_brain_connector(wf, cfg, strat_pool, pipe_num, opt) - wf, wf_outputs = freesurfer_fsl_brain_connector(wf, cfg, strat_pool, - pipe_num, opt) - - outputs = {'space-T1w_desc-tight_acpcbrain_mask': - wf_outputs['space-T1w_desc-tight_brain_mask']} + outputs = { + "space-T1w_desc-tight_acpcbrain_mask": wf_outputs[ + "space-T1w_desc-tight_brain_mask" + ] + } return (wf, outputs) @@ -1985,11 +2141,13 @@ def brain_mask_acpc_freesurfer_fsl_tight(wf, cfg, strat_pool, pipe_num, opt=None outputs=["space-T1w_desc-loose_acpcbrain_mask"], ) def brain_mask_acpc_freesurfer_fsl_loose(wf, cfg, strat_pool, pipe_num, opt=None): 
- wf, wf_outputs = freesurfer_fsl_brain_connector(wf, cfg, strat_pool, pipe_num, opt) - outputs = {'space-T1w_desc-loose_acpcbrain_mask': - wf_outputs['space-T1w_desc-loose_brain_mask']} + outputs = { + "space-T1w_desc-loose_acpcbrain_mask": wf_outputs[ + "space-T1w_desc-loose_brain_mask" + ] + } return (wf, outputs) @@ -2014,8 +2172,7 @@ def brain_mask_acpc_freesurfer_fsl_loose(wf, cfg, strat_pool, pipe_num, opt=None }, ) def brain_extraction(wf, cfg, strat_pool, pipe_num, opt=None): - - ''' + """ brain_mask_deoblique = pe.Node(interface=afni.Refit(), name='brain_mask_deoblique') brain_mask_deoblique.inputs.deoblique = True @@ -2030,25 +2187,26 @@ def brain_extraction(wf, cfg, strat_pool, pipe_num, opt=None): brain_mask_reorient.inputs.outputtype = 'NIFTI_GZ' wf.connect(brain_mask_deoblique, 'out_file', brain_mask_reorient, 'in_file') - ''' - - anat_skullstrip_orig_vol = pe.Node(interface=afni.Calc(), - name=f'brain_extraction_{pipe_num}') + """ + anat_skullstrip_orig_vol = pe.Node( + interface=afni.Calc(), name=f"brain_extraction_{pipe_num}" + ) - anat_skullstrip_orig_vol.inputs.expr = 'a*step(b)' - anat_skullstrip_orig_vol.inputs.outputtype = 'NIFTI_GZ' + anat_skullstrip_orig_vol.inputs.expr = "a*step(b)" + anat_skullstrip_orig_vol.inputs.outputtype = "NIFTI_GZ" - node_T1w, out_T1w = strat_pool.get_data('desc-head_T1w') - wf.connect(node_T1w, out_T1w, anat_skullstrip_orig_vol, 'in_file_a') + node_T1w, out_T1w = strat_pool.get_data("desc-head_T1w") + wf.connect(node_T1w, out_T1w, anat_skullstrip_orig_vol, "in_file_a") - node, out = strat_pool.get_data(['space-T1w_desc-brain_mask', - 'space-T1w_desc-acpcbrain_mask']) - wf.connect(node, out, anat_skullstrip_orig_vol, 'in_file_b') + node, out = strat_pool.get_data( + ["space-T1w_desc-brain_mask", "space-T1w_desc-acpcbrain_mask"] + ) + wf.connect(node, out, anat_skullstrip_orig_vol, "in_file_b") outputs = { - 'desc-preproc_T1w': (anat_skullstrip_orig_vol, 'out_file'), - 'desc-brain_T1w': (anat_skullstrip_orig_vol, 'out_file'), - 'desc-head_T1w': (node_T1w, out_T1w) + "desc-preproc_T1w": (anat_skullstrip_orig_vol, "out_file"), + "desc-brain_T1w": (anat_skullstrip_orig_vol, "out_file"), + "desc-head_T1w": (node_T1w, out_T1w), } return (wf, outputs) @@ -2068,23 +2226,24 @@ def brain_extraction(wf, cfg, strat_pool, pipe_num, opt=None): }, ) def brain_extraction_temp(wf, cfg, strat_pool, pipe_num, opt=None): + anat_skullstrip_orig_vol = pe.Node( + interface=afni.Calc(), name=f"brain_extraction_temp_{pipe_num}" + ) - anat_skullstrip_orig_vol = pe.Node(interface=afni.Calc(), - name=f'brain_extraction_temp_{pipe_num}') - - anat_skullstrip_orig_vol.inputs.expr = 'a*step(b)' - anat_skullstrip_orig_vol.inputs.outputtype = 'NIFTI_GZ' + anat_skullstrip_orig_vol.inputs.expr = "a*step(b)" + anat_skullstrip_orig_vol.inputs.outputtype = "NIFTI_GZ" - node, out = strat_pool.get_data('desc-preproc_T1w') - wf.connect(node, out, anat_skullstrip_orig_vol, 'in_file_a') + node, out = strat_pool.get_data("desc-preproc_T1w") + wf.connect(node, out, anat_skullstrip_orig_vol, "in_file_a") - node, out = strat_pool.get_data(['space-T1w_desc-brain_mask', - 'space-T1w_desc-acpcbrain_mask']) - wf.connect(node, out, anat_skullstrip_orig_vol, 'in_file_b') + node, out = strat_pool.get_data( + ["space-T1w_desc-brain_mask", "space-T1w_desc-acpcbrain_mask"] + ) + wf.connect(node, out, anat_skullstrip_orig_vol, "in_file_b") outputs = { - 'desc-preproc_T1w': (anat_skullstrip_orig_vol, 'out_file'), - 'desc-tempbrain_T1w': (anat_skullstrip_orig_vol, 'out_file') + "desc-preproc_T1w": 
(anat_skullstrip_orig_vol, "out_file"), + "desc-tempbrain_T1w": (anat_skullstrip_orig_vol, "out_file"), } return (wf, outputs) @@ -2098,26 +2257,28 @@ def brain_extraction_temp(wf, cfg, strat_pool, pipe_num, opt=None): outputs=["desc-preproc_T2w", "desc-reorient_T2w", "desc-head_T2w"], ) def anatomical_init_T2(wf, cfg, strat_pool, pipe_num, opt=None): - - T2_deoblique = pe.Node(interface=afni.Refit(), - name=f'T2_deoblique_{pipe_num}') + T2_deoblique = pe.Node(interface=afni.Refit(), name=f"T2_deoblique_{pipe_num}") T2_deoblique.inputs.deoblique = True - node, out = strat_pool.get_data('T2w') - wf.connect(node, out, T2_deoblique, 'in_file') + node, out = strat_pool.get_data("T2w") + wf.connect(node, out, T2_deoblique, "in_file") - T2_reorient = pe.Node(interface=afni.Resample(), - name=f'T2_reorient_{pipe_num}', - mem_gb=0, - mem_x=(0.0115, 'in_file', 't')) - T2_reorient.inputs.orientation = 'RPI' - T2_reorient.inputs.outputtype = 'NIFTI_GZ' + T2_reorient = pe.Node( + interface=afni.Resample(), + name=f"T2_reorient_{pipe_num}", + mem_gb=0, + mem_x=(0.0115, "in_file", "t"), + ) + T2_reorient.inputs.orientation = "RPI" + T2_reorient.inputs.outputtype = "NIFTI_GZ" - wf.connect(T2_deoblique, 'out_file', T2_reorient, 'in_file') + wf.connect(T2_deoblique, "out_file", T2_reorient, "in_file") - outputs = {'desc-preproc_T2w': (T2_reorient, 'out_file'), - 'desc-reorient_T2w': (T2_reorient, 'out_file'), - 'desc-head_T2w': (T2_reorient, 'out_file')} + outputs = { + "desc-preproc_T2w": (T2_reorient, "out_file"), + "desc-reorient_T2w": (T2_reorient, "out_file"), + "desc-head_T2w": (T2_reorient, "out_file"), + } return (wf, outputs) @@ -2132,22 +2293,20 @@ def anatomical_init_T2(wf, cfg, strat_pool, pipe_num, opt=None): outputs=["desc-preproc_T2w"], ) def acpc_align_head_T2(wf, cfg, strat_pool, pipe_num, opt=None): + acpc_align = acpc_alignment( + config=cfg, + acpc_target=cfg.anatomical_preproc["acpc_alignment"]["acpc_target"], + mask=False, + wf_name=f"acpc_align_T2_{pipe_num}", + ) - acpc_align = acpc_alignment(config=cfg, - acpc_target=cfg.anatomical_preproc[ - 'acpc_alignment']['acpc_target'], - mask=False, - wf_name=f'acpc_align_T2_{pipe_num}') - - node, out = strat_pool.get_data('desc-preproc_T2w') - wf.connect(node, out, acpc_align, 'inputspec.anat_leaf') + node, out = strat_pool.get_data("desc-preproc_T2w") + wf.connect(node, out, acpc_align, "inputspec.anat_leaf") - node, out = strat_pool.get_data('T2w-ACPC-template') - wf.connect(node, out, acpc_align, 'inputspec.template_head_for_acpc') + node, out = strat_pool.get_data("T2w-ACPC-template") + wf.connect(node, out, acpc_align, "inputspec.template_head_for_acpc") - outputs = { - 'desc-preproc_T2w': (acpc_align, 'outputspec.acpc_aligned_head') - } + outputs = {"desc-preproc_T2w": (acpc_align, "outputspec.acpc_aligned_head")} return (wf, outputs) @@ -2162,23 +2321,22 @@ def acpc_align_head_T2(wf, cfg, strat_pool, pipe_num, opt=None): outputs=["desc-preproc_T2w", "space-T2w_desc-brain_mask"], ) def acpc_align_head_with_mask_T2(wf, cfg, strat_pool, pipe_num, opt=None): + acpc_align = acpc_alignment( + config=cfg, + acpc_target=cfg.anatomical_preproc["acpc_alignment"]["acpc_target"], + mask=True, + wf_name=f"acpc_align_T2_{pipe_num}", + ) - acpc_align = acpc_alignment(config=cfg, - acpc_target=cfg.anatomical_preproc[ - 'acpc_alignment']['acpc_target'], - mask=True, - wf_name=f'acpc_align_T2_{pipe_num}') - - node, out = strat_pool.get_data('desc-preproc_T2w') - wf.connect(node, out, acpc_align, 'inputspec.anat_leaf') + node, out = 
strat_pool.get_data("desc-preproc_T2w") + wf.connect(node, out, acpc_align, "inputspec.anat_leaf") - node, out = strat_pool.get_data('T2w-ACPC-template') - wf.connect(node, out, acpc_align, 'inputspec.template_head_for_acpc') + node, out = strat_pool.get_data("T2w-ACPC-template") + wf.connect(node, out, acpc_align, "inputspec.template_head_for_acpc") outputs = { - 'desc-preproc_T2w': (acpc_align, 'outputspec.acpc_aligned_head'), - 'space-T2w_desc-brain_mask': ( - acpc_align, 'outputspec.acpc_brain_mask') + "desc-preproc_T2w": (acpc_align, "outputspec.acpc_aligned_head"), + "space-T2w_desc-brain_mask": (acpc_align, "outputspec.acpc_brain_mask"), } return (wf, outputs) @@ -2201,28 +2359,28 @@ def acpc_align_head_with_mask_T2(wf, cfg, strat_pool, pipe_num, opt=None): outputs=["desc-preproc_T2w", "desc-acpcbrain_T2w"], ) def acpc_align_brain_T2(wf, cfg, strat_pool, pipe_num, opt=None): + acpc_align = acpc_alignment( + config=cfg, + acpc_target=cfg.anatomical_preproc["acpc_alignment"]["acpc_target"], + mask=False, + wf_name=f"acpc_align_T2_{pipe_num}", + ) - acpc_align = acpc_alignment(config=cfg, - acpc_target=cfg.anatomical_preproc[ - 'acpc_alignment']['acpc_target'], - mask=False, - wf_name=f'acpc_align_T2_{pipe_num}') - - node, out = strat_pool.get_data('desc-preproc_T2w') - wf.connect(node, out, acpc_align, 'inputspec.anat_leaf') + node, out = strat_pool.get_data("desc-preproc_T2w") + wf.connect(node, out, acpc_align, "inputspec.anat_leaf") - node, out = strat_pool.get_data('desc-tempbrain_T2w') - wf.connect(node, out, acpc_align, 'inputspec.anat_brain') + node, out = strat_pool.get_data("desc-tempbrain_T2w") + wf.connect(node, out, acpc_align, "inputspec.anat_brain") - node, out = strat_pool.get_data('T2w-ACPC-template') - wf.connect(node, out, acpc_align, 'inputspec.template_head_for_acpc') + node, out = strat_pool.get_data("T2w-ACPC-template") + wf.connect(node, out, acpc_align, "inputspec.template_head_for_acpc") - node, out = strat_pool.get_data('T2w-brain-ACPC-template') - wf.connect(node, out, acpc_align, 'inputspec.template_brain_for_acpc') + node, out = strat_pool.get_data("T2w-brain-ACPC-template") + wf.connect(node, out, acpc_align, "inputspec.template_brain_for_acpc") outputs = { - 'desc-preproc_T2w': (acpc_align, 'outputspec.acpc_aligned_head'), - 'desc-acpcbrain_T2w': (acpc_align, 'outputspec.acpc_aligned_brain') + "desc-preproc_T2w": (acpc_align, "outputspec.acpc_aligned_head"), + "desc-acpcbrain_T2w": (acpc_align, "outputspec.acpc_aligned_brain"), } return (wf, outputs) @@ -2242,33 +2400,32 @@ def acpc_align_brain_T2(wf, cfg, strat_pool, pipe_num, opt=None): outputs=["desc-preproc_T2w", "desc-acpcbrain_T2w", "space-T2w_desc-brain_mask"], ) def acpc_align_brain_with_mask_T2(wf, cfg, strat_pool, pipe_num, opt=None): + acpc_align = acpc_alignment( + config=cfg, + acpc_target=cfg.anatomical_preproc["acpc_alignment"]["acpc_target"], + mask=True, + wf_name=f"acpc_align_T2_{pipe_num}", + ) - acpc_align = acpc_alignment(config=cfg, - acpc_target=cfg.anatomical_preproc[ - 'acpc_alignment']['acpc_target'], - mask=True, - wf_name=f'acpc_align_T2_{pipe_num}') - - node, out = strat_pool.get_data('desc-preproc_T2w') - wf.connect(node, out, acpc_align, 'inputspec.anat_leaf') + node, out = strat_pool.get_data("desc-preproc_T2w") + wf.connect(node, out, acpc_align, "inputspec.anat_leaf") - node, out = strat_pool.get_data('desc-tempbrain_T2w') - wf.connect(node, out, acpc_align, 'inputspec.anat_brain') + node, out = strat_pool.get_data("desc-tempbrain_T2w") + wf.connect(node, out, 
acpc_align, "inputspec.anat_brain") - node, out = strat_pool.get_data('space-T2w_desc-brain_mask') - wf.connect(node, out, acpc_align, 'inputspec.brain_mask') + node, out = strat_pool.get_data("space-T2w_desc-brain_mask") + wf.connect(node, out, acpc_align, "inputspec.brain_mask") - node, out = strat_pool.get_data('T2w-ACPC-template') - wf.connect(node, out, acpc_align, 'inputspec.template_head_for_acpc') + node, out = strat_pool.get_data("T2w-ACPC-template") + wf.connect(node, out, acpc_align, "inputspec.template_head_for_acpc") - node, out = strat_pool.get_data('T2w-brain-ACPC-template') - wf.connect(node, out, acpc_align, 'inputspec.template_brain_for_acpc') + node, out = strat_pool.get_data("T2w-brain-ACPC-template") + wf.connect(node, out, acpc_align, "inputspec.template_brain_for_acpc") outputs = { - 'desc-preproc_T2w': (acpc_align, 'outputspec.acpc_aligned_head'), - 'desc-acpcbrain_T2w': (acpc_align, 'outputspec.acpc_aligned_brain'), - 'space-T2w_desc-brain_mask': ( - acpc_align, 'outputspec.acpc_brain_mask') + "desc-preproc_T2w": (acpc_align, "outputspec.acpc_aligned_head"), + "desc-acpcbrain_T2w": (acpc_align, "outputspec.acpc_aligned_brain"), + "space-T2w_desc-brain_mask": (acpc_align, "outputspec.acpc_brain_mask"), } return (wf, outputs) @@ -2284,16 +2441,12 @@ def acpc_align_brain_with_mask_T2(wf, cfg, strat_pool, pipe_num, opt=None): outputs=["desc-preproc_T2w"], ) def non_local_means_T2(wf, cfg, strat_pool, pipe_num, opt=None): + denoise = pe.Node(interface=ants.DenoiseImage(), name=f"anat_denoise_T2_{pipe_num}") - denoise = pe.Node(interface=ants.DenoiseImage(), - name=f'anat_denoise_T2_{pipe_num}') - - node, out = strat_pool.get_data('desc-preproc_T2w') - wf.connect(node, out, denoise, 'input_image') + node, out = strat_pool.get_data("desc-preproc_T2w") + wf.connect(node, out, denoise, "input_image") - outputs = { - 'desc-preproc_T2w': (denoise, 'output_image') - } + outputs = {"desc-preproc_T2w": (denoise, "output_image")} return (wf, outputs) @@ -2308,18 +2461,17 @@ def non_local_means_T2(wf, cfg, strat_pool, pipe_num, opt=None): outputs=["desc-preproc_T2w"], ) def n4_bias_correction_T2(wf, cfg, strat_pool, pipe_num, opt=None): + n4 = pe.Node( + interface=ants.N4BiasFieldCorrection( + dimension=3, shrink_factor=2, copy_header=True + ), + name=f"anat_n4_T2_{pipe_num}", + ) - n4 = pe.Node(interface=ants.N4BiasFieldCorrection(dimension=3, - shrink_factor=2, - copy_header=True), - name=f'anat_n4_T2_{pipe_num}') - - node, out = strat_pool.get_data('desc-preproc_T2w') - wf.connect(node, out, n4, 'input_image') + node, out = strat_pool.get_data("desc-preproc_T2w") + wf.connect(node, out, n4, "input_image") - outputs = { - 'desc-preproc_T2w': (n4, 'output_image') - } + outputs = {"desc-preproc_T2w": (n4, "output_image")} return (wf, outputs) @@ -2347,13 +2499,9 @@ def brain_mask_afni_T2(wf, cfg, strat_pool, pipe_num, opt=None): outputs=["space-T2w_desc-acpcbrain_mask"], ) def brain_mask_acpc_afni_T2(wf, cfg, strat_pool, pipe_num, opt=None): - wf, wf_outputs = afni_brain_connector(wf, cfg, strat_pool, pipe_num, opt) - outputs = { - 'space-T2w_desc-acpcbrain_mask': - wf_outputs['space-T2w_desc-brain_mask'] - } + outputs = {"space-T2w_desc-acpcbrain_mask": wf_outputs["space-T2w_desc-brain_mask"]} return (wf, outputs) @@ -2367,7 +2515,6 @@ def brain_mask_acpc_afni_T2(wf, cfg, strat_pool, pipe_num, opt=None): outputs=["space-T2w_desc-brain_mask"], ) def brain_mask_fsl_T2(wf, cfg, strat_pool, pipe_num, opt=None): - wf, outputs = fsl_brain_connector(wf, cfg, strat_pool, pipe_num, opt) 
return (wf, outputs) @@ -2382,13 +2529,9 @@ def brain_mask_fsl_T2(wf, cfg, strat_pool, pipe_num, opt=None): outputs=["space-T2w_desc-acpcbrain_mask"], ) def brain_mask_acpc_fsl_T2(wf, cfg, strat_pool, pipe_num, opt=None): - wf, wf_outputs = fsl_brain_connector(wf, cfg, strat_pool, pipe_num, opt) - outputs = { - 'space-T2w_desc-acpcbrain_mask': - wf_outputs['space-T2w_desc-brain_mask'] - } + outputs = {"space-T2w_desc-acpcbrain_mask": wf_outputs["space-T2w_desc-brain_mask"]} return (wf, outputs) @@ -2402,9 +2545,7 @@ def brain_mask_acpc_fsl_T2(wf, cfg, strat_pool, pipe_num, opt=None): outputs=["space-T2w_desc-brain_mask"], ) def brain_mask_niworkflows_ants_T2(wf, cfg, strat_pool, pipe_num, opt=None): - - wf, outputs = niworkflows_ants_brain_connector(wf, cfg, strat_pool, - pipe_num, opt) + wf, outputs = niworkflows_ants_brain_connector(wf, cfg, strat_pool, pipe_num, opt) return (wf, outputs) @@ -2418,14 +2559,11 @@ def brain_mask_niworkflows_ants_T2(wf, cfg, strat_pool, pipe_num, opt=None): outputs=["space-T2w_desc-acpcbrain_mask"], ) def brain_mask_acpc_niworkflows_ants_T2(wf, cfg, strat_pool, pipe_num, opt=None): + wf, wf_outputs = niworkflows_ants_brain_connector( + wf, cfg, strat_pool, pipe_num, opt + ) - wf, wf_outputs = niworkflows_ants_brain_connector(wf, cfg, strat_pool, - pipe_num, opt) - - outputs = { - 'space-T2w_desc-acpcbrain_mask': - wf_outputs['space-T2w_desc-brain_mask'] - } + outputs = {"space-T2w_desc-acpcbrain_mask": wf_outputs["space-T2w_desc-brain_mask"]} return (wf, outputs) @@ -2439,7 +2577,6 @@ def brain_mask_acpc_niworkflows_ants_T2(wf, cfg, strat_pool, pipe_num, opt=None) outputs=["space-T2w_desc-brain_mask"], ) def brain_mask_unet_T2(wf, cfg, strat_pool, pipe_num, opt=None): - wf, outputs = unet_brain_connector(wf, cfg, strat_pool, pipe_num, opt) return (wf, outputs) @@ -2454,13 +2591,9 @@ def brain_mask_unet_T2(wf, cfg, strat_pool, pipe_num, opt=None): outputs=["space-T2w_desc-acpcbrain_mask"], ) def brain_mask_acpc_unet_T2(wf, cfg, strat_pool, pipe_num, opt=None): - wf, wf_outputs = unet_brain_connector(wf, cfg, strat_pool, pipe_num, opt) - outputs = { - 'space-T2w_desc-acpcbrain_mask': - wf_outputs['space-T2w_desc-brain_mask'] - } + outputs = {"space-T2w_desc-acpcbrain_mask": wf_outputs["space-T2w_desc-brain_mask"]} return (wf, outputs) @@ -2480,30 +2613,36 @@ def brain_mask_acpc_unet_T2(wf, cfg, strat_pool, pipe_num, opt=None): outputs=["space-T2w_desc-brain_mask"], ) def brain_mask_T2(wf, cfg, strat_pool, pipe_num, opt=None): + brain_mask_T2 = mask_T2(wf_name=f"brain_mask_T2_{pipe_num}") - brain_mask_T2 = mask_T2(wf_name=f'brain_mask_T2_{pipe_num}') - - if not cfg.anatomical_preproc['acpc_alignment']['run']: - node, out = strat_pool.get_data(['desc-reorient_T1w','T1w','desc-preproc_T1w']) - wf.connect(node, out, brain_mask_T2, 'inputspec.T1w') + if not cfg.anatomical_preproc["acpc_alignment"]["run"]: + node, out = strat_pool.get_data( + ["desc-reorient_T1w", "T1w", "desc-preproc_T1w"] + ) + wf.connect(node, out, brain_mask_T2, "inputspec.T1w") - node, out = strat_pool.get_data(['desc-reorient_T2w', 'T2w', 'desc-preproc_T2w']) - wf.connect(node, out, brain_mask_T2, 'inputspec.T2w') + node, out = strat_pool.get_data( + ["desc-reorient_T2w", "T2w", "desc-preproc_T2w"] + ) + wf.connect(node, out, brain_mask_T2, "inputspec.T2w") else: - node, out = strat_pool.get_data(['desc-preproc_T1w','desc-reorient_T1w','T1w']) - wf.connect(node, out, brain_mask_T2, 'inputspec.T1w') + node, out = strat_pool.get_data( + ["desc-preproc_T1w", "desc-reorient_T1w", "T1w"] + ) + 
wf.connect(node, out, brain_mask_T2, "inputspec.T1w") - node, out = strat_pool.get_data(['desc-preproc_T2w','desc-reorient_T2w', 'T2w']) - wf.connect(node, out, brain_mask_T2, 'inputspec.T2w') + node, out = strat_pool.get_data( + ["desc-preproc_T2w", "desc-reorient_T2w", "T2w"] + ) + wf.connect(node, out, brain_mask_T2, "inputspec.T2w") - node, out = strat_pool.get_data(["space-T1w_desc-brain_mask", - "space-T1w_desc-acpcbrain_mask"]) - wf.connect(node, out, brain_mask_T2, 'inputspec.T1w_mask') - - outputs = { - 'space-T2w_desc-brain_mask': (brain_mask_T2, 'outputspec.T2w_mask') - } + node, out = strat_pool.get_data( + ["space-T1w_desc-brain_mask", "space-T1w_desc-acpcbrain_mask"] + ) + wf.connect(node, out, brain_mask_T2, "inputspec.T1w_mask") + + outputs = {"space-T2w_desc-brain_mask": (brain_mask_T2, "outputspec.T2w_mask")} return (wf, outputs) @@ -2520,21 +2659,20 @@ def brain_mask_T2(wf, cfg, strat_pool, pipe_num, opt=None): outputs=["space-T2w_desc-acpcbrain_mask"], ) def brain_mask_acpc_T2(wf, cfg, strat_pool, pipe_num, opt=None): + brain_mask_T2 = mask_T2(wf_name=f"brain_mask_acpc_T2_{pipe_num}") - brain_mask_T2 = mask_T2(wf_name=f'brain_mask_acpc_T2_{pipe_num}') + node, out = strat_pool.get_data("desc-reorient_T1w") + wf.connect(node, out, brain_mask_T2, "inputspec.T1w") - node, out = strat_pool.get_data('desc-reorient_T1w') - wf.connect(node, out, brain_mask_T2, 'inputspec.T1w') + node, out = strat_pool.get_data("desc-reorient_T2w") + wf.connect(node, out, brain_mask_T2, "inputspec.T2w") - node, out = strat_pool.get_data('desc-reorient_T2w') - wf.connect(node, out, brain_mask_T2, 'inputspec.T2w') - - node, out = strat_pool.get_data(["space-T1w_desc-acpcbrain_mask", "space-T1w_desc-prebrain_mask"]) - wf.connect(node, out, brain_mask_T2, 'inputspec.T1w_mask') + node, out = strat_pool.get_data( + ["space-T1w_desc-acpcbrain_mask", "space-T1w_desc-prebrain_mask"] + ) + wf.connect(node, out, brain_mask_T2, "inputspec.T1w_mask") - outputs = { - 'space-T2w_desc-acpcbrain_mask': (brain_mask_T2, 'outputspec.T2w_mask') - } + outputs = {"space-T2w_desc-acpcbrain_mask": (brain_mask_T2, "outputspec.T2w_mask")} return (wf, outputs) @@ -2553,27 +2691,26 @@ def brain_mask_acpc_T2(wf, cfg, strat_pool, pipe_num, opt=None): outputs=["desc-brain_T2w"], ) def brain_extraction_T2(wf, cfg, strat_pool, pipe_num, opt=None): - - if cfg.anatomical_preproc['acpc_alignment']['run'] and cfg.anatomical_preproc['acpc_alignment']['acpc_target'] == 'brain': - outputs = { - 'desc-brain_T2w': (strat_pool.get_data(["desc-acpcbrain_T2w"])) - } + if ( + cfg.anatomical_preproc["acpc_alignment"]["run"] + and cfg.anatomical_preproc["acpc_alignment"]["acpc_target"] == "brain" + ): + outputs = {"desc-brain_T2w": (strat_pool.get_data(["desc-acpcbrain_T2w"]))} else: - anat_skullstrip_orig_vol = pe.Node(interface=afni.Calc(), - name=f'brain_extraction_T2_{pipe_num}') + anat_skullstrip_orig_vol = pe.Node( + interface=afni.Calc(), name=f"brain_extraction_T2_{pipe_num}" + ) - anat_skullstrip_orig_vol.inputs.expr = 'a*step(b)' - anat_skullstrip_orig_vol.inputs.outputtype = 'NIFTI_GZ' + anat_skullstrip_orig_vol.inputs.expr = "a*step(b)" + anat_skullstrip_orig_vol.inputs.outputtype = "NIFTI_GZ" - node, out = strat_pool.get_data('desc-preproc_T2w') - wf.connect(node, out, anat_skullstrip_orig_vol, 'in_file_a') + node, out = strat_pool.get_data("desc-preproc_T2w") + wf.connect(node, out, anat_skullstrip_orig_vol, "in_file_a") - node, out = strat_pool.get_data(['space-T2w_desc-brain_mask']) - wf.connect(node, out, 
anat_skullstrip_orig_vol, 'in_file_b') + node, out = strat_pool.get_data(["space-T2w_desc-brain_mask"]) + wf.connect(node, out, anat_skullstrip_orig_vol, "in_file_b") - outputs = { - 'desc-brain_T2w': (anat_skullstrip_orig_vol, 'out_file') - } + outputs = {"desc-brain_T2w": (anat_skullstrip_orig_vol, "out_file")} return (wf, outputs) @@ -2591,26 +2728,26 @@ def brain_extraction_T2(wf, cfg, strat_pool, pipe_num, opt=None): outputs=["desc-tempbrain_T2w"], ) def brain_extraction_temp_T2(wf, cfg, strat_pool, pipe_num, opt=None): + anat_skullstrip_orig_vol = pe.Node( + interface=afni.Calc(), name=f"brain_extraction_temp_T2_{pipe_num}" + ) - anat_skullstrip_orig_vol = pe.Node(interface=afni.Calc(), - name=f'brain_extraction_temp_T2_{pipe_num}') - - anat_skullstrip_orig_vol.inputs.expr = 'a*step(b)' - anat_skullstrip_orig_vol.inputs.outputtype = 'NIFTI_GZ' + anat_skullstrip_orig_vol.inputs.expr = "a*step(b)" + anat_skullstrip_orig_vol.inputs.outputtype = "NIFTI_GZ" - node, out = strat_pool.get_data('desc-preproc_T2w') - wf.connect(node, out, anat_skullstrip_orig_vol, 'in_file_a') + node, out = strat_pool.get_data("desc-preproc_T2w") + wf.connect(node, out, anat_skullstrip_orig_vol, "in_file_a") - node, out = strat_pool.get_data(['space-T2w_desc-brain_mask', - 'space-T2w_desc-acpcbrain_mask']) - wf.connect(node, out, anat_skullstrip_orig_vol, 'in_file_b') + node, out = strat_pool.get_data( + ["space-T2w_desc-brain_mask", "space-T2w_desc-acpcbrain_mask"] + ) + wf.connect(node, out, anat_skullstrip_orig_vol, "in_file_b") - outputs = { - 'desc-tempbrain_T2w': (anat_skullstrip_orig_vol, 'out_file') - } + outputs = {"desc-tempbrain_T2w": (anat_skullstrip_orig_vol, "out_file")} return (wf, outputs) + @nodeblock( name="freesurfer_abcd_preproc", config=["surface_analysis", "abcd_prefreesurfer_prep"], @@ -2650,105 +2787,130 @@ def brain_extraction_temp_T2(wf, cfg, strat_pool, pipe_num, opt=None): ) def freesurfer_abcd_preproc(wf, cfg, strat_pool, pipe_num, opt=None): # fnirt-based brain extraction - brain_extraction = fnirt_based_brain_extraction(config=cfg, - wf_name=f'fnirt_based_brain_extraction_{pipe_num}') + brain_extraction = fnirt_based_brain_extraction( + config=cfg, wf_name=f"fnirt_based_brain_extraction_{pipe_num}" + ) - node, out = strat_pool.get_data('desc-preproc_T1w') - wf.connect(node, out, brain_extraction, 'inputspec.anat_data') + node, out = strat_pool.get_data("desc-preproc_T1w") + wf.connect(node, out, brain_extraction, "inputspec.anat_data") - node, out = strat_pool.get_data('template-ref-mask-res-2') - wf.connect(node, out, brain_extraction, 'inputspec.template-ref-mask-res-2') + node, out = strat_pool.get_data("template-ref-mask-res-2") + wf.connect(node, out, brain_extraction, "inputspec.template-ref-mask-res-2") - node, out = strat_pool.get_data('T1w-template') - wf.connect(node, out, brain_extraction, 'inputspec.template_skull_for_anat') + node, out = strat_pool.get_data("T1w-template") + wf.connect(node, out, brain_extraction, "inputspec.template_skull_for_anat") - node, out = strat_pool.get_data('T1w-template-res-2') - wf.connect(node, out, brain_extraction, 'inputspec.template_skull_for_anat_2mm') + node, out = strat_pool.get_data("T1w-template-res-2") + wf.connect(node, out, brain_extraction, "inputspec.template_skull_for_anat_2mm") - node, out = strat_pool.get_data('T1w-brain-template-mask') - wf.connect(node, out, brain_extraction, 'inputspec.template_brain_mask_for_anat') + node, out = strat_pool.get_data("T1w-brain-template-mask") + wf.connect(node, out, brain_extraction, 
"inputspec.template_brain_mask_for_anat") # fast bias field correction - fast_correction = fast_bias_field_correction(config=cfg, - wf_name=f'fast_bias_field_correction_{pipe_num}') + fast_correction = fast_bias_field_correction( + config=cfg, wf_name=f"fast_bias_field_correction_{pipe_num}" + ) - node, out = strat_pool.get_data('desc-preproc_T1w') - wf.connect(node, out, fast_correction, 'inputspec.anat_data') + node, out = strat_pool.get_data("desc-preproc_T1w") + wf.connect(node, out, fast_correction, "inputspec.anat_data") - wf.connect(brain_extraction, 'outputspec.anat_brain', fast_correction, 'inputspec.anat_brain') + wf.connect( + brain_extraction, + "outputspec.anat_brain", + fast_correction, + "inputspec.anat_brain", + ) - wf.connect(brain_extraction, 'outputspec.anat_brain_mask', fast_correction, 'inputspec.anat_brain_mask') + wf.connect( + brain_extraction, + "outputspec.anat_brain_mask", + fast_correction, + "inputspec.anat_brain_mask", + ) ### ABCD Harmonization ### # Ref: https://github.com/DCAN-Labs/DCAN-HCP/blob/master/FreeSurfer/FreeSurferPipeline.sh#L140-L144 # flirt -interp spline -in "$T1wImage" -ref "$T1wImage" -applyisoxfm 1 -out "$T1wImageFile"_1mm.nii.gz - resample_head_1mm = pe.Node(interface=fsl.FLIRT(), - name=f'resample_anat_head_1mm_{pipe_num}') - resample_head_1mm.inputs.interp = 'spline' + resample_head_1mm = pe.Node( + interface=fsl.FLIRT(), name=f"resample_anat_head_1mm_{pipe_num}" + ) + resample_head_1mm.inputs.interp = "spline" resample_head_1mm.inputs.apply_isoxfm = 1 - node, out = strat_pool.get_data('desc-preproc_T1w') - wf.connect(node, out, resample_head_1mm, 'in_file') + node, out = strat_pool.get_data("desc-preproc_T1w") + wf.connect(node, out, resample_head_1mm, "in_file") - wf.connect(node, out, resample_head_1mm, 'reference') + wf.connect(node, out, resample_head_1mm, "reference") # applywarp --rel --interp=spline -i "$T1wImage" -r "$T1wImageFile"_1mm.nii.gz --premat=$FSLDIR/etc/flirtsch/ident.mat -o "$T1wImageFile"_1mm.nii.gz - applywarp_head_to_head_1mm = pe.Node(interface=fsl.ApplyWarp(), - name=f'applywarp_head_to_head_1mm_{pipe_num}') + applywarp_head_to_head_1mm = pe.Node( + interface=fsl.ApplyWarp(), name=f"applywarp_head_to_head_1mm_{pipe_num}" + ) applywarp_head_to_head_1mm.inputs.relwarp = True - applywarp_head_to_head_1mm.inputs.interp = 'spline' - applywarp_head_to_head_1mm.inputs.premat = cfg.registration_workflows['anatomical_registration']['registration']['FSL-FNIRT']['identity_matrix'] + applywarp_head_to_head_1mm.inputs.interp = "spline" + applywarp_head_to_head_1mm.inputs.premat = cfg.registration_workflows[ + "anatomical_registration" + ]["registration"]["FSL-FNIRT"]["identity_matrix"] - wf.connect(node, out, applywarp_head_to_head_1mm, 'in_file') + wf.connect(node, out, applywarp_head_to_head_1mm, "in_file") - wf.connect(resample_head_1mm, 'out_file', - applywarp_head_to_head_1mm, 'ref_file') + wf.connect(resample_head_1mm, "out_file", applywarp_head_to_head_1mm, "ref_file") # applywarp --rel --interp=nn -i "$T1wImageBrain" -r "$T1wImageFile"_1mm.nii.gz --premat=$FSLDIR/etc/flirtsch/ident.mat -o "$T1wImageBrainFile"_1mm.nii.gz - applywarp_brain_to_head_1mm = pe.Node(interface=fsl.ApplyWarp(), - name=f'applywarp_brain_to_head_1mm_{pipe_num}') + applywarp_brain_to_head_1mm = pe.Node( + interface=fsl.ApplyWarp(), name=f"applywarp_brain_to_head_1mm_{pipe_num}" + ) applywarp_brain_to_head_1mm.inputs.relwarp = True - applywarp_brain_to_head_1mm.inputs.interp = 'nn' - applywarp_brain_to_head_1mm.inputs.premat = 
cfg.registration_workflows['anatomical_registration']['registration']['FSL-FNIRT']['identity_matrix'] - - wf.connect(fast_correction, 'outputspec.anat_brain_restore', - applywarp_brain_to_head_1mm, 'in_file') + applywarp_brain_to_head_1mm.inputs.interp = "nn" + applywarp_brain_to_head_1mm.inputs.premat = cfg.registration_workflows[ + "anatomical_registration" + ]["registration"]["FSL-FNIRT"]["identity_matrix"] + + wf.connect( + fast_correction, + "outputspec.anat_brain_restore", + applywarp_brain_to_head_1mm, + "in_file", + ) - wf.connect(resample_head_1mm, 'out_file', - applywarp_brain_to_head_1mm, 'ref_file') + wf.connect(resample_head_1mm, "out_file", applywarp_brain_to_head_1mm, "ref_file") # fslstats $T1wImageBrain -M - average_brain = pe.Node(interface=fsl.ImageStats(), - name=f'average_brain_{pipe_num}') - average_brain.inputs.op_string = '-M' - average_brain.inputs.output_type = 'NIFTI_GZ' + average_brain = pe.Node( + interface=fsl.ImageStats(), name=f"average_brain_{pipe_num}" + ) + average_brain.inputs.op_string = "-M" + average_brain.inputs.output_type = "NIFTI_GZ" - wf.connect(fast_correction, 'outputspec.anat_brain_restore', - average_brain, 'in_file') + wf.connect( + fast_correction, "outputspec.anat_brain_restore", average_brain, "in_file" + ) # fslmaths "$T1wImageFile"_1mm.nii.gz -div $Mean -mul 150 -abs "$T1wImageFile"_1mm.nii.gz - normalize_head = pe.Node(util.Function(input_names=['in_file', 'number', 'out_file_suffix'], - output_names=['out_file'], - function=fslmaths_command), - name=f'normalize_head_{pipe_num}') - normalize_head.inputs.out_file_suffix = '_norm' + normalize_head = pe.Node( + util.Function( + input_names=["in_file", "number", "out_file_suffix"], + output_names=["out_file"], + function=fslmaths_command, + ), + name=f"normalize_head_{pipe_num}", + ) + normalize_head.inputs.out_file_suffix = "_norm" - wf.connect(applywarp_head_to_head_1mm, 'out_file', - normalize_head, 'in_file') + wf.connect(applywarp_head_to_head_1mm, "out_file", normalize_head, "in_file") - wf.connect(average_brain, 'out_stat', - normalize_head, 'number') + wf.connect(average_brain, "out_stat", normalize_head, "number") outputs = { - 'desc-restore_T1w': (fast_correction, 'outputspec.anat_restore'), - 'desc-restore-brain_T1w': (fast_correction, - 'outputspec.anat_brain_restore'), - 'pipeline-fs_desc-fast_biasfield': (fast_correction, 'outputspec.bias_field'), - 'desc-ABCDpreproc_T1w': (normalize_head, 'out_file') - } + "desc-restore_T1w": (fast_correction, "outputspec.anat_restore"), + "desc-restore-brain_T1w": (fast_correction, "outputspec.anat_brain_restore"), + "pipeline-fs_desc-fast_biasfield": (fast_correction, "outputspec.bias_field"), + "desc-ABCDpreproc_T1w": (normalize_head, "out_file"), + } return (wf, outputs) + @nodeblock( name="freesurfer_reconall", config=["surface_analysis", "freesurfer"], @@ -2761,49 +2923,47 @@ def freesurfer_abcd_preproc(wf, cfg, strat_pool, pipe_num, opt=None): "pipeline-fs_brainmask", "pipeline-fs_wmparc", "pipeline-fs_T1", - *freesurfer_abcd_preproc.outputs + *freesurfer_abcd_preproc.outputs, # we're grabbing the postproc outputs and appending them to # the reconall outputs ], ) def freesurfer_reconall(wf, cfg, strat_pool, pipe_num, opt=None): - - reconall = pe.Node(interface=freesurfer.ReconAll(), - name=f'anat_freesurfer_{pipe_num}', - mem_gb=2.7) + reconall = pe.Node( + interface=freesurfer.ReconAll(), name=f"anat_freesurfer_{pipe_num}", mem_gb=2.7 + ) reconall.skip_timeout = True # this Node could take > 24 hours freesurfer_subject_dir = 
os.path.join( - cfg.pipeline_setup['working_directory']['path'], - 'cpac_'+cfg['subject_id'], - f'anat_preproc_freesurfer_{pipe_num}', - 'anat_freesurfer') + cfg.pipeline_setup["working_directory"]["path"], + "cpac_" + cfg["subject_id"], + f"anat_preproc_freesurfer_{pipe_num}", + "anat_freesurfer", + ) if not os.path.exists(freesurfer_subject_dir): os.makedirs(freesurfer_subject_dir) - reconall.inputs.directive = 'all' + reconall.inputs.directive = "all" reconall.inputs.subjects_dir = freesurfer_subject_dir - reconall.inputs.openmp = cfg.pipeline_setup['system_config'][ - 'num_OMP_threads'] + reconall.inputs.openmp = cfg.pipeline_setup["system_config"]["num_OMP_threads"] - if cfg.surface_analysis['freesurfer']['reconall_args'] is not None: - reconall.inputs.args = cfg.surface_analysis['freesurfer'][ - 'reconall_args'] + if cfg.surface_analysis["freesurfer"]["reconall_args"] is not None: + reconall.inputs.args = cfg.surface_analysis["freesurfer"]["reconall_args"] - node, out = strat_pool.get_data(["desc-ABCDpreproc_T1w","desc-preproc_T1w"]) - wf.connect(node, out, reconall, 'T1_files') + node, out = strat_pool.get_data(["desc-ABCDpreproc_T1w", "desc-preproc_T1w"]) + wf.connect(node, out, reconall, "T1_files") wf, hemisphere_outputs = freesurfer_hemispheres(wf, reconall, pipe_num) outputs = { - 'freesurfer-subject-dir': (reconall, 'subjects_dir'), + "freesurfer-subject-dir": (reconall, "subjects_dir"), **hemisphere_outputs, - 'pipeline-fs_raw-average': (reconall, 'rawavg'), - 'pipeline-fs_subcortical-seg': (reconall, 'aseg'), - 'pipeline-fs_brainmask': (reconall, 'brainmask'), - 'pipeline-fs_wmparc': (reconall, 'wmparc'), - 'pipeline-fs_T1': (reconall, 'T1') + "pipeline-fs_raw-average": (reconall, "rawavg"), + "pipeline-fs_subcortical-seg": (reconall, "aseg"), + "pipeline-fs_brainmask": (reconall, "brainmask"), + "pipeline-fs_wmparc": (reconall, "wmparc"), + "pipeline-fs_T1": (reconall, "T1"), } # for label, connection in outputs.items(): @@ -2818,39 +2978,41 @@ def freesurfer_reconall(wf, cfg, strat_pool, pipe_num, opt=None): return wf, outputs -def fnirt_based_brain_extraction(config=None, - wf_name='fnirt_based_brain_extraction'): - +def fnirt_based_brain_extraction(config=None, wf_name="fnirt_based_brain_extraction"): ### ABCD Harmonization - FNIRT-based brain extraction ### # Ref: https://github.com/DCAN-Labs/DCAN-HCP/blob/master/PreFreeSurfer/scripts/BrainExtraction_FNIRTbased.sh preproc = pe.Workflow(name=wf_name) - inputnode = pe.Node(util.IdentityInterface(fields=['anat_data', - 'template-ref-mask-res-2', - 'template_skull_for_anat', - 'template_skull_for_anat_2mm', - 'template_brain_mask_for_anat']), - name='inputspec') - - outputnode = pe.Node(util.IdentityInterface(fields=['anat_brain', - 'anat_brain_mask']), - name='outputspec') + inputnode = pe.Node( + util.IdentityInterface( + fields=[ + "anat_data", + "template-ref-mask-res-2", + "template_skull_for_anat", + "template_skull_for_anat_2mm", + "template_brain_mask_for_anat", + ] + ), + name="inputspec", + ) + + outputnode = pe.Node( + util.IdentityInterface(fields=["anat_brain", "anat_brain_mask"]), + name="outputspec", + ) # Register to 2mm reference image (linear then non-linear) # linear registration to 2mm reference # flirt -interp spline -dof 12 -in "$Input" -ref "$Reference2mm" -omat "$WD"/roughlin.mat -out "$WD"/"$BaseName"_to_MNI_roughlin.nii.gz -nosearch - linear_reg = pe.Node(interface=fsl.FLIRT(), - name='linear_reg') + linear_reg = pe.Node(interface=fsl.FLIRT(), name="linear_reg") linear_reg.inputs.dof = 12 - 
linear_reg.inputs.interp = 'spline' + linear_reg.inputs.interp = "spline" linear_reg.inputs.no_search = True - preproc.connect(inputnode, 'anat_data', - linear_reg, 'in_file') + preproc.connect(inputnode, "anat_data", linear_reg, "in_file") - preproc.connect(inputnode, 'template_skull_for_anat_2mm', - linear_reg, 'reference') + preproc.connect(inputnode, "template_skull_for_anat_2mm", linear_reg, "reference") # non-linear registration to 2mm reference # fnirt --in="$Input" --ref="$Reference2mm" --aff="$WD"/roughlin.mat --refmask="$Reference2mmMask" \ @@ -2858,172 +3020,152 @@ def fnirt_based_brain_extraction(config=None, # --refout="$WD"/IntensityModulatedT1.nii.gz --iout="$WD"/"$BaseName"_to_MNI_nonlin.nii.gz \ # --logout="$WD"/NonlinearReg.txt --intout="$WD"/NonlinearIntensities.nii.gz \ # --cout="$WD"/NonlinearReg.nii.gz --config="$FNIRTConfig" - non_linear_reg = pe.Node(interface=fsl.FNIRT(), - name='non_linear_reg') + non_linear_reg = pe.Node(interface=fsl.FNIRT(), name="non_linear_reg") - non_linear_reg.inputs.field_file = True # --fout - non_linear_reg.inputs.jacobian_file = True # --jout - non_linear_reg.inputs.modulatedref_file = True # --refout + non_linear_reg.inputs.field_file = True # --fout + non_linear_reg.inputs.jacobian_file = True # --jout + non_linear_reg.inputs.modulatedref_file = True # --refout # non_linear_reg.inputs.warped_file = 'T1w_acpc_to_MNI_nonlin.nii.gz' # --iout # non_linear_reg.inputs.log_file = 'NonlinearReg.txt' # --logout - non_linear_reg.inputs.out_intensitymap_file = True # --intout - non_linear_reg.inputs.fieldcoeff_file = True # --cout + non_linear_reg.inputs.out_intensitymap_file = True # --intout + non_linear_reg.inputs.fieldcoeff_file = True # --cout non_linear_reg.inputs.config_file = config.registration_workflows[ - 'anatomical_registration']['registration']['FSL-FNIRT']['fnirt_config'] + "anatomical_registration" + ]["registration"]["FSL-FNIRT"]["fnirt_config"] - preproc.connect(inputnode, 'anat_data', - non_linear_reg, 'in_file') + preproc.connect(inputnode, "anat_data", non_linear_reg, "in_file") - preproc.connect(inputnode, 'template_skull_for_anat_2mm', - non_linear_reg, 'ref_file') + preproc.connect( + inputnode, "template_skull_for_anat_2mm", non_linear_reg, "ref_file" + ) - preproc.connect(linear_reg, 'out_matrix_file', - non_linear_reg, 'affine_file') + preproc.connect(linear_reg, "out_matrix_file", non_linear_reg, "affine_file") - preproc.connect(inputnode, 'template-ref-mask-res-2', - non_linear_reg, 'refmask_file') + preproc.connect( + inputnode, "template-ref-mask-res-2", non_linear_reg, "refmask_file" + ) # Overwrite the image output from FNIRT with a spline interpolated highres version # creating spline interpolated hires version # applywarp --rel --interp=spline --in="$Input" --ref="$Reference" -w "$WD"/str2standard.nii.gz --out="$WD"/"$BaseName"_to_MNI_nonlin.nii.gz - apply_warp = pe.Node(interface=fsl.ApplyWarp(), - name='apply_warp') + apply_warp = pe.Node(interface=fsl.ApplyWarp(), name="apply_warp") - apply_warp.inputs.interp = 'spline' + apply_warp.inputs.interp = "spline" apply_warp.inputs.relwarp = True - preproc.connect(inputnode, 'anat_data', - apply_warp, 'in_file') + preproc.connect(inputnode, "anat_data", apply_warp, "in_file") - preproc.connect(inputnode, 'template_skull_for_anat', - apply_warp, 'ref_file') + preproc.connect(inputnode, "template_skull_for_anat", apply_warp, "ref_file") - preproc.connect(non_linear_reg, 'field_file', - apply_warp, 'field_file') + preproc.connect(non_linear_reg, "field_file", 
apply_warp, "field_file") # Invert warp and transform dilated brain mask back into native space, and use it to mask input image # Input and reference spaces are the same, using 2mm reference to save time # invwarp --ref="$Reference2mm" -w "$WD"/str2standard.nii.gz -o "$WD"/standard2str.nii.gz - inverse_warp = pe.Node(interface=fsl.InvWarp(), name='inverse_warp') - inverse_warp.inputs.output_type = 'NIFTI_GZ' + inverse_warp = pe.Node(interface=fsl.InvWarp(), name="inverse_warp") + inverse_warp.inputs.output_type = "NIFTI_GZ" - preproc.connect(inputnode, 'template_skull_for_anat_2mm', - inverse_warp, 'reference') + preproc.connect(inputnode, "template_skull_for_anat_2mm", inverse_warp, "reference") - preproc.connect(non_linear_reg, 'field_file', - inverse_warp, 'warp') + preproc.connect(non_linear_reg, "field_file", inverse_warp, "warp") # Apply inverse warp # applywarp --rel --interp=nn --in="$ReferenceMask" --ref="$Input" -w "$WD"/standard2str.nii.gz -o "$OutputBrainMask" - apply_inv_warp = pe.Node(interface=fsl.ApplyWarp(), - name='apply_inv_warp') - apply_inv_warp.inputs.interp = 'nn' + apply_inv_warp = pe.Node(interface=fsl.ApplyWarp(), name="apply_inv_warp") + apply_inv_warp.inputs.interp = "nn" apply_inv_warp.inputs.relwarp = True - preproc.connect(inputnode, 'template_brain_mask_for_anat', - apply_inv_warp, 'in_file') + preproc.connect( + inputnode, "template_brain_mask_for_anat", apply_inv_warp, "in_file" + ) + + preproc.connect(inputnode, "anat_data", apply_inv_warp, "ref_file") - preproc.connect(inputnode, 'anat_data', - apply_inv_warp, 'ref_file') + preproc.connect(inverse_warp, "inverse_warp", apply_inv_warp, "field_file") - preproc.connect(inverse_warp, 'inverse_warp', - apply_inv_warp, 'field_file') + preproc.connect(apply_inv_warp, "out_file", outputnode, "anat_brain_mask") - preproc.connect(apply_inv_warp, 'out_file', - outputnode, 'anat_brain_mask') - # Apply mask to create brain # fslmaths "$Input" -mas "$OutputBrainMask" "$OutputBrainExtractedImage" - apply_mask = pe.Node(interface=fsl.MultiImageMaths(), - name='apply_mask') - apply_mask.inputs.op_string = '-mas %s' + apply_mask = pe.Node(interface=fsl.MultiImageMaths(), name="apply_mask") + apply_mask.inputs.op_string = "-mas %s" - preproc.connect(inputnode, 'anat_data', - apply_mask, 'in_file') + preproc.connect(inputnode, "anat_data", apply_mask, "in_file") - preproc.connect(apply_inv_warp, 'out_file', - apply_mask, 'operand_files') + preproc.connect(apply_inv_warp, "out_file", apply_mask, "operand_files") - preproc.connect(apply_mask, 'out_file', - outputnode, 'anat_brain') - - return preproc + preproc.connect(apply_mask, "out_file", outputnode, "anat_brain") + return preproc -def fast_bias_field_correction(config=None, wf_name='fast_bias_field_correction'): +def fast_bias_field_correction(config=None, wf_name="fast_bias_field_correction"): ### ABCD Harmonization - FAST bias field correction ### # Ref: https://github.com/DCAN-Labs/DCAN-HCP/blob/master/PreFreeSurfer/PreFreeSurferPipeline.sh#L688-L694 preproc = pe.Workflow(name=wf_name) - inputnode = pe.Node(util.IdentityInterface(fields=['anat_data', - 'anat_brain', - 'anat_brain_mask']), - name='inputspec') + inputnode = pe.Node( + util.IdentityInterface(fields=["anat_data", "anat_brain", "anat_brain_mask"]), + name="inputspec", + ) - outputnode = pe.Node(util.IdentityInterface(fields=['anat_restore', - 'anat_brain_restore', - 'bias_field']), - name='outputspec') + outputnode = pe.Node( + util.IdentityInterface( + fields=["anat_restore", "anat_brain_restore", 
"bias_field"] + ), + name="outputspec", + ) # fast -b -B -o ${T1wFolder}/T1w_fast -t 1 ${T1wFolder}/T1w_acpc_dc_brain.nii.gz - fast_bias_field_correction = pe.Node(interface=fsl.FAST(), - name='fast_bias_field_correction') + fast_bias_field_correction = pe.Node( + interface=fsl.FAST(), name="fast_bias_field_correction" + ) fast_bias_field_correction.inputs.img_type = 1 fast_bias_field_correction.inputs.output_biasfield = True fast_bias_field_correction.inputs.output_biascorrected = True - preproc.connect(inputnode, 'anat_brain', - fast_bias_field_correction, 'in_files') + preproc.connect(inputnode, "anat_brain", fast_bias_field_correction, "in_files") - preproc.connect(fast_bias_field_correction, 'restored_image', - outputnode, 'anat_brain_restore') + preproc.connect( + fast_bias_field_correction, "restored_image", outputnode, "anat_brain_restore" + ) - preproc.connect(fast_bias_field_correction, 'bias_field', - outputnode, 'bias_field') + preproc.connect(fast_bias_field_correction, "bias_field", outputnode, "bias_field") - # FAST does not output a non-brain extracted image so create an inverse mask, - # apply it to T1w_acpc_dc.nii.gz, insert the T1w_fast_restore to the skull of + # FAST does not output a non-brain extracted image so create an inverse mask, + # apply it to T1w_acpc_dc.nii.gz, insert the T1w_fast_restore to the skull of # the T1w_acpc_dc.nii.gz and use that for the T1w_acpc_dc_restore head # fslmaths ${T1wFolder}/T1w_acpc_brain_mask.nii.gz -mul -1 -add 1 ${T1wFolder}/T1w_acpc_inverse_brain_mask.nii.gz - inverse_brain_mask = pe.Node(interface=fsl.ImageMaths(), - name='inverse_brain_mask') - inverse_brain_mask.inputs.op_string = '-mul -1 -add 1' + inverse_brain_mask = pe.Node(interface=fsl.ImageMaths(), name="inverse_brain_mask") + inverse_brain_mask.inputs.op_string = "-mul -1 -add 1" - preproc.connect(inputnode, 'anat_brain_mask', - inverse_brain_mask, 'in_file') + preproc.connect(inputnode, "anat_brain_mask", inverse_brain_mask, "in_file") # fslmaths ${T1wFolder}/T1w_acpc_dc.nii.gz -mul ${T1wFolder}/T1w_acpc_inverse_brain_mask.nii.gz ${T1wFolder}/T1w_acpc_dc_skull.nii.gz - apply_mask = pe.Node(interface=fsl.MultiImageMaths(), - name='apply_mask') - apply_mask.inputs.op_string = '-mul %s' + apply_mask = pe.Node(interface=fsl.MultiImageMaths(), name="apply_mask") + apply_mask.inputs.op_string = "-mul %s" - preproc.connect(inputnode, 'anat_data', - apply_mask, 'in_file') + preproc.connect(inputnode, "anat_data", apply_mask, "in_file") - preproc.connect(inverse_brain_mask, 'out_file', - apply_mask, 'operand_files') + preproc.connect(inverse_brain_mask, "out_file", apply_mask, "operand_files") # fslmaths ${T1wFolder}/T1w_fast_restore.nii.gz -add ${T1wFolder}/T1w_acpc_dc_skull.nii.gz ${T1wFolder}/${T1wImage}_acpc_dc_restore - anat_restore = pe.Node(interface=fsl.MultiImageMaths(), - name='get_anat_restore') - anat_restore.inputs.op_string = '-add %s' + anat_restore = pe.Node(interface=fsl.MultiImageMaths(), name="get_anat_restore") + anat_restore.inputs.op_string = "-add %s" - preproc.connect(fast_bias_field_correction, 'restored_image', - anat_restore, 'in_file') + preproc.connect( + fast_bias_field_correction, "restored_image", anat_restore, "in_file" + ) - preproc.connect(apply_mask, 'out_file', - anat_restore, 'operand_files') + preproc.connect(apply_mask, "out_file", anat_restore, "operand_files") - preproc.connect(anat_restore, 'out_file', - outputnode, 'anat_restore') + preproc.connect(anat_restore, "out_file", outputnode, "anat_restore") return preproc - @nodeblock( 
name="correct_restore_brain_intensity_abcd", config=["anatomical_preproc", "brain_extraction"], @@ -3042,127 +3184,120 @@ def fast_bias_field_correction(config=None, wf_name='fast_bias_field_correction' ], outputs=["desc-restore-brain_T1w"], ) -def correct_restore_brain_intensity_abcd(wf, cfg, strat_pool, pipe_num, - opt=None): - +def correct_restore_brain_intensity_abcd(wf, cfg, strat_pool, pipe_num, opt=None): ### ABCD Harmonization - Myelin Map ### # Ref: https://github.com/DCAN-Labs/DCAN-HCP/blob/master/PreFreeSurfer/PreFreeSurferPipeline.sh#L655-L656 # fslmerge -t ${T1wFolder}/xfms/${T1wImage}_dc ${T1wFolder}/${T1wImage}_acpc ${T1wFolder}/${T1wImage}_acpc ${T1wFolder}/${T1wImage}_acpc - merge_t1_acpc_to_list = pe.Node(util.Merge(3), - name=f'merge_t1_acpc_to_list_{pipe_num}') + merge_t1_acpc_to_list = pe.Node( + util.Merge(3), name=f"merge_t1_acpc_to_list_{pipe_num}" + ) - node, out = strat_pool.get_data('desc-preproc_T1w') - wf.connect(node, out, merge_t1_acpc_to_list, 'in1') - wf.connect(node, out, merge_t1_acpc_to_list, 'in2') - wf.connect(node, out, merge_t1_acpc_to_list, 'in3') + node, out = strat_pool.get_data("desc-preproc_T1w") + wf.connect(node, out, merge_t1_acpc_to_list, "in1") + wf.connect(node, out, merge_t1_acpc_to_list, "in2") + wf.connect(node, out, merge_t1_acpc_to_list, "in3") - merge_t1_acpc = pe.Node(interface=fslMerge(), - name=f'merge_t1_acpc_{pipe_num}') + merge_t1_acpc = pe.Node(interface=fslMerge(), name=f"merge_t1_acpc_{pipe_num}") - merge_t1_acpc.inputs.dimension = 't' + merge_t1_acpc.inputs.dimension = "t" - wf.connect(merge_t1_acpc_to_list, 'out', - merge_t1_acpc, 'in_files') + wf.connect(merge_t1_acpc_to_list, "out", merge_t1_acpc, "in_files") # fslmaths ${T1wFolder}/xfms/${T1wImage}_dc -mul 0 ${T1wFolder}/xfms/${T1wImage}_dc - multiply_t1_acpc_by_zero = pe.Node(interface=fsl.ImageMaths(), - name=f'multiply_t1_acpc_by_zero_{pipe_num}') - - multiply_t1_acpc_by_zero.inputs.op_string = '-mul 0' + multiply_t1_acpc_by_zero = pe.Node( + interface=fsl.ImageMaths(), name=f"multiply_t1_acpc_by_zero_{pipe_num}" + ) - wf.connect(merge_t1_acpc, 'merged_file', - multiply_t1_acpc_by_zero, 'in_file') + multiply_t1_acpc_by_zero.inputs.op_string = "-mul 0" + + wf.connect(merge_t1_acpc, "merged_file", multiply_t1_acpc_by_zero, "in_file") # Ref: https://github.com/DCAN-Labs/DCAN-HCP/blob/master/PostFreeSurfer/PostFreeSurferPipeline.sh#L157 # convertwarp --relout --rel --ref="$T1wFolder"/"$T1wImageBrainMask" --premat="$T1wFolder"/xfms/"$InitialT1wTransform" \ # --warp1="$T1wFolder"/xfms/"$dcT1wTransform" --out="$T1wFolder"/xfms/"$OutputOrigT1wToT1w" - convertwarp_orig_t1_to_t1 = pe.Node(interface=fsl.ConvertWarp(), - name=f'convertwarp_orig_t1_to_t1_{pipe_num}') + convertwarp_orig_t1_to_t1 = pe.Node( + interface=fsl.ConvertWarp(), name=f"convertwarp_orig_t1_to_t1_{pipe_num}" + ) convertwarp_orig_t1_to_t1.inputs.out_relwarp = True convertwarp_orig_t1_to_t1.inputs.relwarp = True - + node, out = strat_pool.get_data("space-T1w_desc-brain_mask") - wf.connect(node, out, convertwarp_orig_t1_to_t1, 'reference') + wf.connect(node, out, convertwarp_orig_t1_to_t1, "reference") - node, out = strat_pool.get_data('from-T1w_to-ACPC_mode-image_desc-aff2rig_xfm') - wf.connect(node, out, convertwarp_orig_t1_to_t1, 'premat') - wf.connect(multiply_t1_acpc_by_zero, 'out_file', - convertwarp_orig_t1_to_t1, 'warp1') + node, out = strat_pool.get_data("from-T1w_to-ACPC_mode-image_desc-aff2rig_xfm") + wf.connect(node, out, convertwarp_orig_t1_to_t1, "premat") + wf.connect(multiply_t1_acpc_by_zero, 
"out_file", convertwarp_orig_t1_to_t1, "warp1") # Ref: https://github.com/DCAN-Labs/DCAN-HCP/blob/master/PostFreeSurfer/scripts/CreateMyelinMaps.sh#L72-L73 # applywarp --rel --interp=spline -i "$BiasField" -r "$T1wImageBrain" -w "$AtlasTransform" -o "$BiasFieldOutput" - applywarp_biasfield = pe.Node(interface=fsl.ApplyWarp(), - name=f'applywarp_biasfield_{pipe_num}') + applywarp_biasfield = pe.Node( + interface=fsl.ApplyWarp(), name=f"applywarp_biasfield_{pipe_num}" + ) applywarp_biasfield.inputs.relwarp = True - applywarp_biasfield.inputs.interp = 'spline' + applywarp_biasfield.inputs.interp = "spline" - node, out = strat_pool.get_data('pipeline-fs_desc-fast_biasfield') - wf.connect(node, out, applywarp_biasfield, 'in_file') + node, out = strat_pool.get_data("pipeline-fs_desc-fast_biasfield") + wf.connect(node, out, applywarp_biasfield, "in_file") node, out = strat_pool.get_data("space-T1w_desc-brain_mask") - wf.connect(node, out, applywarp_biasfield, 'ref_file') + wf.connect(node, out, applywarp_biasfield, "ref_file") - node, out = strat_pool.get_data('from-T1w_to-template_mode-image_xfm') - wf.connect(node, out, applywarp_biasfield, 'field_file') + node, out = strat_pool.get_data("from-T1w_to-template_mode-image_xfm") + wf.connect(node, out, applywarp_biasfield, "field_file") # fslmaths "$BiasFieldOutput" -thr 0.1 "$BiasFieldOutput" - threshold_biasfield = pe.Node(interface=fsl.ImageMaths(), - name=f'threshold_biasfield_{pipe_num}') + threshold_biasfield = pe.Node( + interface=fsl.ImageMaths(), name=f"threshold_biasfield_{pipe_num}" + ) - threshold_biasfield.inputs.op_string = '-thr 0.1' - wf.connect(applywarp_biasfield, 'out_file', - threshold_biasfield, 'in_file') + threshold_biasfield.inputs.op_string = "-thr 0.1" + wf.connect(applywarp_biasfield, "out_file", threshold_biasfield, "in_file") # Ref: https://github.com/DCAN-Labs/DCAN-HCP/blob/master/PostFreeSurfer/scripts/CreateMyelinMaps.sh#L67-L70 # applywarp --rel --interp=spline -i "$OrginalT1wImage" -r "$T1wImageBrain" -w "$OutputOrigT1wToT1w" -o "$OutputT1wImage" - applywarp_t1 = pe.Node(interface=fsl.ApplyWarp(), - name=f'applywarp_t1_{pipe_num}') - + applywarp_t1 = pe.Node(interface=fsl.ApplyWarp(), name=f"applywarp_t1_{pipe_num}") + applywarp_t1.inputs.relwarp = True - applywarp_t1.inputs.interp = 'spline' - - node, out = strat_pool.get_data('desc-n4_T1w') - wf.connect(node, out, applywarp_t1, 'in_file') - + applywarp_t1.inputs.interp = "spline" + + node, out = strat_pool.get_data("desc-n4_T1w") + wf.connect(node, out, applywarp_t1, "in_file") + node, out = strat_pool.get_data("space-T1w_desc-brain_mask") - wf.connect(node, out, applywarp_t1, 'ref_file') - - wf.connect(convertwarp_orig_t1_to_t1, 'out_file', - applywarp_t1, 'field_file') + wf.connect(node, out, applywarp_t1, "ref_file") + + wf.connect(convertwarp_orig_t1_to_t1, "out_file", applywarp_t1, "field_file") # fslmaths "$OutputT1wImage" -abs "$OutputT1wImage" -odt float - abs_t1 = pe.Node(interface=fsl.ImageMaths(), - name=f'abs_t1_{pipe_num}') + abs_t1 = pe.Node(interface=fsl.ImageMaths(), name=f"abs_t1_{pipe_num}") - abs_t1.inputs.op_string = '-abs' - wf.connect(applywarp_t1, 'out_file', abs_t1, 'in_file') + abs_t1.inputs.op_string = "-abs" + wf.connect(applywarp_t1, "out_file", abs_t1, "in_file") # fslmaths "$OutputT1wImage" -div "$BiasField" "$OutputT1wImageRestore" - div_t1_by_biasfield = pe.Node(interface=fsl.ImageMaths(), - name=f'div_t1_by_biasfield_{pipe_num}') + div_t1_by_biasfield = pe.Node( + interface=fsl.ImageMaths(), name=f"div_t1_by_biasfield_{pipe_num}" 
+    )
 
-    div_t1_by_biasfield.inputs.op_string = '-div'
+    div_t1_by_biasfield.inputs.op_string = "-div"
 
-    wf.connect(abs_t1, 'out_file', div_t1_by_biasfield, 'in_file')
+    wf.connect(abs_t1, "out_file", div_t1_by_biasfield, "in_file")
 
-    node, out = strat_pool.get_data('pipeline-fs_desc-fast_biasfield')
-    wf.connect(node, out, div_t1_by_biasfield, 'in_file2')
+    node, out = strat_pool.get_data("pipeline-fs_desc-fast_biasfield")
+    wf.connect(node, out, div_t1_by_biasfield, "in_file2")
 
     # fslmaths "$OutputT1wImageRestore" -mas "$T1wImageBrain" "$OutputT1wImageRestoreBrain"
-    apply_mask = pe.Node(interface=fsl.maths.ApplyMask(),
-                         name=f'get_restored_corrected_brain_{pipe_num}')
+    apply_mask = pe.Node(
+        interface=fsl.maths.ApplyMask(), name=f"get_restored_corrected_brain_{pipe_num}"
+    )
 
-    wf.connect(div_t1_by_biasfield, 'out_file',
-               apply_mask, 'in_file')
+    wf.connect(div_t1_by_biasfield, "out_file", apply_mask, "in_file")
 
     node, out = strat_pool.get_data("space-T1w_desc-brain_mask")
-    wf.connect(node, out, apply_mask, 'mask_file')
+    wf.connect(node, out, apply_mask, "mask_file")
 
-    outputs = {
-        'desc-restore-brain_T1w': (apply_mask, 'out_file')
-    }
+    outputs = {"desc-restore-brain_T1w": (apply_mask, "out_file")}
 
     return (wf, outputs)
 
-
diff --git a/CPAC/anat_preproc/lesion_preproc.py b/CPAC/anat_preproc/lesion_preproc.py
index f6f12fd983..2ef58c3d2a 100644
--- a/CPAC/anat_preproc/lesion_preproc.py
+++ b/CPAC/anat_preproc/lesion_preproc.py
@@ -1,9 +1,10 @@
 # -*- coding: utf-8 -*-
 from nipype.interfaces import afni
-from CPAC.pipeline import nipype_pipeline_engine as pe
 import nipype.interfaces.utility as util
 
+from CPAC.pipeline import nipype_pipeline_engine as pe
+
 
 def inverse_lesion(lesion_path):
     """
@@ -21,15 +22,16 @@ def inverse_lesion(lesion_path):
         path to the output file, if the lesion does not require to be
         inverted it returns the unchanged lesion_path input
     """
-    import shutil
-    import os
     import ntpath
+    import os
+    import shutil
 
-    import CPAC.utils.nifti_utils as nu
     import nibabel as nib
 
+    import CPAC.utils.nifti_utils as nu
+
     lesion_out = lesion_path
-
+
     if nu.more_zeros_than_ones(image=lesion_path):
         lesion_out = os.path.join(os.getcwd(), ntpath.basename(lesion_path))
         shutil.copyfile(lesion_path, lesion_out)
@@ -40,7 +42,7 @@ def inverse_lesion(lesion_path):
     return lesion_out
 
 
-def create_lesion_preproc(wf_name='lesion_preproc'):
+def create_lesion_preproc(wf_name="lesion_preproc"):
     """
     The main purpose of this workflow is to process lesion masks.
Lesion mask file is deobliqued and reoriented in the same way as the T1 in @@ -80,49 +82,45 @@ def create_lesion_preproc(wf_name='lesion_preproc'): >>> preproc.inputs.inputspec.lesion = 'sub1/anat/lesion-mask.nii.gz' >>> preproc.run() #doctest: +SKIP """ - preproc = pe.Workflow(name=wf_name) - inputnode = pe.Node(util.IdentityInterface( - fields=['lesion']), name='inputspec') + inputnode = pe.Node(util.IdentityInterface(fields=["lesion"]), name="inputspec") - outputnode = pe.Node(util.IdentityInterface(fields=['refit', - 'reorient']), - name='outputspec') + outputnode = pe.Node( + util.IdentityInterface(fields=["refit", "reorient"]), name="outputspec" + ) - lesion_deoblique = pe.Node(interface=afni.Refit(), - name='lesion_deoblique') + lesion_deoblique = pe.Node(interface=afni.Refit(), name="lesion_deoblique") lesion_deoblique.inputs.deoblique = True - lesion_inverted = pe.Node(interface=util.Function( - input_names=['lesion_path'], - output_names=['lesion_out'], - function=inverse_lesion), - name='inverse_lesion') + lesion_inverted = pe.Node( + interface=util.Function( + input_names=["lesion_path"], + output_names=["lesion_out"], + function=inverse_lesion, + ), + name="inverse_lesion", + ) # We first check and invert the lesion if needed to be used by ANTs - preproc.connect( - inputnode, 'lesion', lesion_inverted, 'lesion_path') + preproc.connect(inputnode, "lesion", lesion_inverted, "lesion_path") - preproc.connect( - lesion_inverted, 'lesion_out', lesion_deoblique, 'in_file') + preproc.connect(lesion_inverted, "lesion_out", lesion_deoblique, "in_file") - preproc.connect( - lesion_deoblique, 'out_file', outputnode, 'refit') + preproc.connect(lesion_deoblique, "out_file", outputnode, "refit") # Anatomical reorientation - lesion_reorient = pe.Node(interface=afni.Resample(), - name='lesion_reorient', - mem_gb=0, - mem_x=(0.0115, 'in_file', 't')) - - lesion_reorient.inputs.orientation = 'RPI' - lesion_reorient.inputs.outputtype = 'NIFTI_GZ' - - preproc.connect( - lesion_deoblique, 'out_file', lesion_reorient, - 'in_file') - preproc.connect( - lesion_reorient, 'out_file', outputnode, 'reorient') + lesion_reorient = pe.Node( + interface=afni.Resample(), + name="lesion_reorient", + mem_gb=0, + mem_x=(0.0115, "in_file", "t"), + ) + + lesion_reorient.inputs.orientation = "RPI" + lesion_reorient.inputs.outputtype = "NIFTI_GZ" + + preproc.connect(lesion_deoblique, "out_file", lesion_reorient, "in_file") + preproc.connect(lesion_reorient, "out_file", outputnode, "reorient") return preproc diff --git a/CPAC/func_preproc/func_preproc.py b/CPAC/func_preproc/func_preproc.py index b2f9e49ae1..fef6a01024 100644 --- a/CPAC/func_preproc/func_preproc.py +++ b/CPAC/func_preproc/func_preproc.py @@ -15,18 +15,22 @@ # You should have received a copy of the GNU Lesser General Public # License along with C-PAC. If not, see . 
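The Refit-then-Resample idiom that closes lesion_preproc above, and that
reappears as func_reorient further down in this patch, can be read in
isolation. A minimal sketch, using plain nipype Workflow/Node rather than
C-PAC's nipype_pipeline_engine wrapper, with a hypothetical input path:

    # Sketch of the deoblique -> reorient idiom used throughout this patch.
    # Requires AFNI on $PATH at run time; the input file is hypothetical.
    from nipype.interfaces import afni
    from nipype.pipeline import engine as pe

    wf = pe.Workflow(name="deoblique_reorient_sketch")

    # 3drefit -deoblique: strip the oblique transform from the header
    deoblique = pe.Node(afni.Refit(deoblique=True), name="deoblique")
    deoblique.inputs.in_file = "lesion-mask.nii.gz"  # hypothetical input

    # 3dresample -orient RPI: reorient to the working orientation
    reorient = pe.Node(
        afni.Resample(orientation="RPI", outputtype="NIFTI_GZ"), name="reorient"
    )

    wf.connect(deoblique, "out_file", reorient, "in_file")
    # wf.run()  # uncomment to execute with AFNI installed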
"""Functional preprocessing""" + # pylint: disable=ungrouped-imports,wrong-import-order,wrong-import-position from nipype import logging from nipype.interfaces import afni, ants, fsl, utility as util -logger = logging.getLogger('nipype.workflow') -from CPAC.pipeline import nipype_pipeline_engine as pe -from CPAC.pipeline.nodeblock import nodeblock -from nipype.interfaces.afni import preprocess -from nipype.interfaces.afni import utils as afni_utils + +logger = logging.getLogger("nipype.workflow") +from nipype.interfaces.afni import preprocess, utils as afni_utils from CPAC.func_preproc.utils import nullify -from CPAC.utils.interfaces.ants import AI # niworkflows -from CPAC.utils.interfaces.ants import PrintHeader, SetDirectionByMatrix +from CPAC.pipeline import nipype_pipeline_engine as pe +from CPAC.pipeline.nodeblock import nodeblock +from CPAC.utils.interfaces.ants import ( + AI, # niworkflows + PrintHeader, + SetDirectionByMatrix, +) from CPAC.utils.utils import add_afni_prefix @@ -35,222 +39,213 @@ def collect_arguments(*args): if args[0]: command_args += [args[1]] command_args += args[2:] - return ' '.join(command_args) + return " ".join(command_args) -def anat_refined_mask(init_bold_mask=True, wf_name='init_bold_mask'): - +def anat_refined_mask(init_bold_mask=True, wf_name="init_bold_mask"): wf = pe.Workflow(name=wf_name) - input_node = pe.Node(util.IdentityInterface(fields=['func', - 'anatomical_brain_mask', - 'anat_brain', - 'init_func_brain_mask']), - name='inputspec') + input_node = pe.Node( + util.IdentityInterface( + fields=[ + "func", + "anatomical_brain_mask", + "anat_brain", + "init_func_brain_mask", + ] + ), + name="inputspec", + ) - output_node = pe.Node(util.IdentityInterface(fields=['func_brain_mask']), - name='outputspec') + output_node = pe.Node( + util.IdentityInterface(fields=["func_brain_mask"]), name="outputspec" + ) # 1 Take single volume of func - func_single_volume = pe.Node(interface=afni.Calc(), - name='func_single_volume') + func_single_volume = pe.Node(interface=afni.Calc(), name="func_single_volume") # TODO add an option to select volume - func_single_volume.inputs.set( - expr='a', - single_idx=1, - outputtype='NIFTI_GZ' - ) + func_single_volume.inputs.set(expr="a", single_idx=1, outputtype="NIFTI_GZ") - wf.connect(input_node, 'func', - func_single_volume, 'in_file_a') + wf.connect(input_node, "func", func_single_volume, "in_file_a") # 2 get temporary func brain - func_tmp_brain = pe.Node(interface=afni_utils.Calc(), - name='func_tmp_brain') - func_tmp_brain.inputs.expr = 'a*b' - func_tmp_brain.inputs.outputtype = 'NIFTI_GZ' + func_tmp_brain = pe.Node(interface=afni_utils.Calc(), name="func_tmp_brain") + func_tmp_brain.inputs.expr = "a*b" + func_tmp_brain.inputs.outputtype = "NIFTI_GZ" - wf.connect(func_single_volume, 'out_file', - func_tmp_brain, 'in_file_a') + wf.connect(func_single_volume, "out_file", func_tmp_brain, "in_file_a") # 2.1 get a tmp func brain mask if init_bold_mask == True: # 2.1.1 N4BiasFieldCorrection single volume of raw_func func_single_volume_n4_corrected = pe.Node( - interface=ants.N4BiasFieldCorrection(dimension=3, - copy_header=True, - bspline_fitting_distance=200), + interface=ants.N4BiasFieldCorrection( + dimension=3, copy_header=True, bspline_fitting_distance=200 + ), shrink_factor=2, - name='func_single_volume_n4_corrected') - func_single_volume_n4_corrected.inputs.args = '-r True' + name="func_single_volume_n4_corrected", + ) + func_single_volume_n4_corrected.inputs.args = "-r True" - wf.connect(func_single_volume, 'out_file', 
- func_single_volume_n4_corrected, 'input_image') + wf.connect( + func_single_volume, + "out_file", + func_single_volume_n4_corrected, + "input_image", + ) # 2.1.2 bet n4 corrected image - generate tmp func brain mask - func_tmp_brain_mask = pe.Node(interface=fsl.BET(), - name='func_tmp_brain_mask_pre') + func_tmp_brain_mask = pe.Node( + interface=fsl.BET(), name="func_tmp_brain_mask_pre" + ) func_tmp_brain_mask.inputs.mask = True - wf.connect(func_single_volume_n4_corrected, 'output_image', - func_tmp_brain_mask, 'in_file') + wf.connect( + func_single_volume_n4_corrected, + "output_image", + func_tmp_brain_mask, + "in_file", + ) # 2.1.3 dilate func tmp brain mask - func_tmp_brain_mask_dil = pe.Node(interface=fsl.ImageMaths(), - name='func_tmp_brain_mask_dil') - func_tmp_brain_mask_dil.inputs.op_string = '-dilM' + func_tmp_brain_mask_dil = pe.Node( + interface=fsl.ImageMaths(), name="func_tmp_brain_mask_dil" + ) + func_tmp_brain_mask_dil.inputs.op_string = "-dilM" - wf.connect(func_tmp_brain_mask, 'mask_file', - func_tmp_brain_mask_dil, 'in_file') + wf.connect(func_tmp_brain_mask, "mask_file", func_tmp_brain_mask_dil, "in_file") - wf.connect(func_tmp_brain_mask_dil, 'out_file', - func_tmp_brain, 'in_file_b') + wf.connect(func_tmp_brain_mask_dil, "out_file", func_tmp_brain, "in_file_b") else: # 2.1.1 connect dilated init func brain mask - wf.connect(input_node, 'init_func_brain_mask', - func_tmp_brain, 'in_file_b') + wf.connect(input_node, "init_func_brain_mask", func_tmp_brain, "in_file_b") # 3. get transformation of anat to func # 3.1 Register func tmp brain to anat brain to get func2anat matrix - linear_reg_func_to_anat = pe.Node(interface=fsl.FLIRT(), - name='func_to_anat_linear_reg') - linear_reg_func_to_anat.inputs.cost = 'mutualinfo' + linear_reg_func_to_anat = pe.Node( + interface=fsl.FLIRT(), name="func_to_anat_linear_reg" + ) + linear_reg_func_to_anat.inputs.cost = "mutualinfo" linear_reg_func_to_anat.inputs.dof = 6 - wf.connect(func_tmp_brain, 'out_file', - linear_reg_func_to_anat, 'in_file') + wf.connect(func_tmp_brain, "out_file", linear_reg_func_to_anat, "in_file") - wf.connect(input_node, 'anat_brain', - linear_reg_func_to_anat, 'reference') + wf.connect(input_node, "anat_brain", linear_reg_func_to_anat, "reference") # 3.2 Inverse func to anat affine - inv_func_to_anat_affine = pe.Node(interface=fsl.ConvertXFM(), - name='inv_func2anat_affine') + inv_func_to_anat_affine = pe.Node( + interface=fsl.ConvertXFM(), name="inv_func2anat_affine" + ) inv_func_to_anat_affine.inputs.invert_xfm = True - wf.connect(linear_reg_func_to_anat, 'out_matrix_file', - inv_func_to_anat_affine, 'in_file') + wf.connect( + linear_reg_func_to_anat, "out_matrix_file", inv_func_to_anat_affine, "in_file" + ) # 4. 
anat mask to func space # Transform anatomical mask to functional space to get BOLD mask - reg_anat_mask_to_func = pe.Node(interface=fsl.FLIRT(), - name='reg_anat_mask_to_func') + reg_anat_mask_to_func = pe.Node(interface=fsl.FLIRT(), name="reg_anat_mask_to_func") reg_anat_mask_to_func.inputs.apply_xfm = True - reg_anat_mask_to_func.inputs.cost = 'mutualinfo' + reg_anat_mask_to_func.inputs.cost = "mutualinfo" reg_anat_mask_to_func.inputs.dof = 6 - reg_anat_mask_to_func.inputs.interp = 'nearestneighbour' + reg_anat_mask_to_func.inputs.interp = "nearestneighbour" - wf.connect(input_node, 'anatomical_brain_mask', - reg_anat_mask_to_func, 'in_file') + wf.connect(input_node, "anatomical_brain_mask", reg_anat_mask_to_func, "in_file") - wf.connect(func_tmp_brain, 'out_file', - reg_anat_mask_to_func, 'reference') + wf.connect(func_tmp_brain, "out_file", reg_anat_mask_to_func, "reference") - wf.connect(inv_func_to_anat_affine, 'out_file', - reg_anat_mask_to_func, 'in_matrix_file') + wf.connect( + inv_func_to_anat_affine, "out_file", reg_anat_mask_to_func, "in_matrix_file" + ) # 5. get final func mask: refine func tmp mask with anat_mask_in_func mask - func_mask = pe.Node(interface=fsl.MultiImageMaths(), name='func_mask') + func_mask = pe.Node(interface=fsl.MultiImageMaths(), name="func_mask") func_mask.inputs.op_string = "-mul %s" - wf.connect(reg_anat_mask_to_func, 'out_file', - func_mask, 'operand_files') + wf.connect(reg_anat_mask_to_func, "out_file", func_mask, "operand_files") if init_bold_mask == True: - wf.connect(func_tmp_brain_mask_dil, 'out_file', - func_mask, 'in_file') + wf.connect(func_tmp_brain_mask_dil, "out_file", func_mask, "in_file") else: - wf.connect(input_node, 'init_func_brain_mask', - func_mask, 'in_file') + wf.connect(input_node, "init_func_brain_mask", func_mask, "in_file") - wf.connect(func_mask, 'out_file', - output_node, 'func_brain_mask') + wf.connect(func_mask, "out_file", output_node, "func_brain_mask") return wf -def anat_based_mask(wf_name='bold_mask'): - """reference `DCAN lab BOLD mask `_ - """ +def anat_based_mask(wf_name="bold_mask"): + """Reference `DCAN lab BOLD mask `_""" wf = pe.Workflow(name=wf_name) - input_node = pe.Node(util.IdentityInterface(fields=['func', - 'anat_brain', - 'anat_head']), - name='inputspec') + input_node = pe.Node( + util.IdentityInterface(fields=["func", "anat_brain", "anat_head"]), + name="inputspec", + ) - output_node = pe.Node(util.IdentityInterface(fields=['func_brain_mask']), - name='outputspec') + output_node = pe.Node( + util.IdentityInterface(fields=["func_brain_mask"]), name="outputspec" + ) # 0. Take single volume of func - func_single_volume = pe.Node(interface=afni.Calc(), - name='func_single_volume') + func_single_volume = pe.Node(interface=afni.Calc(), name="func_single_volume") - func_single_volume.inputs.set( - expr='a', - single_idx=1, - outputtype='NIFTI_GZ' - ) + func_single_volume.inputs.set(expr="a", single_idx=1, outputtype="NIFTI_GZ") - wf.connect(input_node, 'func', - func_single_volume, 'in_file_a') + wf.connect(input_node, "func", func_single_volume, "in_file_a") # 1. 
Register func head to anat head to get func2anat matrix
-    linear_reg_func_to_anat = pe.Node(interface=fsl.FLIRT(),
-                                      name='func_to_anat_linear_reg')
+    linear_reg_func_to_anat = pe.Node(
+        interface=fsl.FLIRT(), name="func_to_anat_linear_reg"
+    )
     linear_reg_func_to_anat.inputs.dof = 6
-    linear_reg_func_to_anat.inputs.interp = 'spline'
+    linear_reg_func_to_anat.inputs.interp = "spline"
     linear_reg_func_to_anat.inputs.searchr_x = [30, 30]
     linear_reg_func_to_anat.inputs.searchr_y = [30, 30]
     linear_reg_func_to_anat.inputs.searchr_z = [30, 30]
 
-    wf.connect(func_single_volume, 'out_file',
-               linear_reg_func_to_anat, 'in_file')
+    wf.connect(func_single_volume, "out_file", linear_reg_func_to_anat, "in_file")
 
-    wf.connect(input_node, 'anat_head',
-               linear_reg_func_to_anat, 'reference')
+    wf.connect(input_node, "anat_head", linear_reg_func_to_anat, "reference")
 
     # 2. Inverse func to anat affine, to get anat-to-func transform
-    inv_func_to_anat_affine = pe.Node(interface=fsl.ConvertXFM(),
-                                      name='inv_func2anat_affine')
+    inv_func_to_anat_affine = pe.Node(
+        interface=fsl.ConvertXFM(), name="inv_func2anat_affine"
+    )
     inv_func_to_anat_affine.inputs.invert_xfm = True
 
-    wf.connect(linear_reg_func_to_anat, 'out_matrix_file',
-               inv_func_to_anat_affine, 'in_file')
+    wf.connect(
+        linear_reg_func_to_anat, "out_matrix_file", inv_func_to_anat_affine, "in_file"
+    )
 
     # 3. get BOLD mask
     # 3.1 Apply anat-to-func transform to transfer anatomical brain to functional space
-    reg_anat_brain_to_func = pe.Node(interface=fsl.ApplyWarp(),
-                                     name='reg_anat_brain_to_func')
-    reg_anat_brain_to_func.inputs.interp = 'nn'
+    reg_anat_brain_to_func = pe.Node(
+        interface=fsl.ApplyWarp(), name="reg_anat_brain_to_func"
+    )
+    reg_anat_brain_to_func.inputs.interp = "nn"
     reg_anat_brain_to_func.inputs.relwarp = True
 
-    wf.connect(input_node, 'anat_brain',
-               reg_anat_brain_to_func, 'in_file')
+    wf.connect(input_node, "anat_brain", reg_anat_brain_to_func, "in_file")
 
-    wf.connect(input_node, 'func',
-               reg_anat_brain_to_func, 'ref_file')
+    wf.connect(input_node, "func", reg_anat_brain_to_func, "ref_file")
 
-    wf.connect(inv_func_to_anat_affine, 'out_file',
-               reg_anat_brain_to_func, 'premat')
+    wf.connect(inv_func_to_anat_affine, "out_file", reg_anat_brain_to_func, "premat")
 
     # 3.2 Binarize transferred image and fill holes to get BOLD mask.
     # Binarize
-    func_mask_bin = pe.Node(interface=fsl.ImageMaths(),
-                            name='func_mask')
-    func_mask_bin.inputs.op_string = '-bin'
+    func_mask_bin = pe.Node(interface=fsl.ImageMaths(), name="func_mask")
+    func_mask_bin.inputs.op_string = "-bin"
 
-    wf.connect(reg_anat_brain_to_func, 'out_file',
-               func_mask_bin, 'in_file')
+    wf.connect(reg_anat_brain_to_func, "out_file", func_mask_bin, "in_file")
 
-    wf.connect(func_mask_bin, 'out_file',
-               output_node, 'func_brain_mask')
+    wf.connect(func_mask_bin, "out_file", output_node, "func_brain_mask")
 
     return wf
 
 
-def create_scale_func_wf(scaling_factor, wf_name='scale_func'):
+def create_scale_func_wf(scaling_factor, wf_name="scale_func"):
     """Workflow to scale func data.
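    A hypothetical invocation, mirroring the doctest style used in
    lesion_preproc above (the scaling factor shown is illustrative only):

    >>> from CPAC.func_preproc.func_preproc import create_scale_func_wf
    >>> preproc = create_scale_func_wf(10)
    >>> preproc.inputs.inputspec.func = 'sub1/func/rest.nii.gz'
    >>> preproc.run()  #doctest: +SKIP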
Workflow Inputs::
 
@@ -271,31 +266,27 @@ def create_scale_func_wf(scaling_factor, wf_name='scale_func'):
     wf_name : str
         name of the workflow
     """
-
     # allocate a workflow object
     preproc = pe.Workflow(name=wf_name)
 
     # configure the workflow's input spec
-    inputNode = pe.Node(util.IdentityInterface(fields=['func']),
-                        name='inputspec')
+    inputNode = pe.Node(util.IdentityInterface(fields=["func"]), name="inputspec")
 
     # configure the workflow's output spec
-    outputNode = pe.Node(util.IdentityInterface(fields=['scaled_func']),
-                         name='outputspec')
+    outputNode = pe.Node(
+        util.IdentityInterface(fields=["scaled_func"]), name="outputspec"
+    )
 
     # allocate a node to edit the functional file
-    func_scale = pe.Node(interface=afni_utils.Refit(),
-                         name='func_scale')
+    func_scale = pe.Node(interface=afni_utils.Refit(), name="func_scale")
 
     func_scale.inputs.xyzscale = scaling_factor
 
     # wire in the func_scale node
-    preproc.connect(inputNode, 'func',
-                    func_scale, 'in_file')
+    preproc.connect(inputNode, "func", func_scale, "in_file")
 
     # wire the output
-    preproc.connect(func_scale, 'out_file',
-                    outputNode, 'scaled_func')
+    preproc.connect(func_scale, "out_file", outputNode, "scaled_func")
 
     return preproc
 
@@ -333,125 +324,120 @@ def create_wf_edit_func(wf_name="edit_func"):
         -prefix rest_3dc.nii.gz
     """
-
     # allocate a workflow object
     preproc = pe.Workflow(name=wf_name)
 
     # configure the workflow's input spec
-    inputNode = pe.Node(util.IdentityInterface(fields=['func',
-                                                       'start_idx',
-                                                       'stop_idx']),
-                        name='inputspec')
+    inputNode = pe.Node(
+        util.IdentityInterface(fields=["func", "start_idx", "stop_idx"]),
+        name="inputspec",
+    )
 
     # configure the workflow's output spec
-    outputNode = pe.Node(util.IdentityInterface(fields=['edited_func']),
-                         name='outputspec')
+    outputNode = pe.Node(
+        util.IdentityInterface(fields=["edited_func"]), name="outputspec"
+    )
 
     # allocate a node to check that the requested edits are
     # reasonable given the data
-    func_get_idx = pe.Node(util.Function(input_names=['in_files',
-                                                      'stop_idx',
-                                                      'start_idx'],
-                                         output_names=['stopidx',
-                                                       'startidx'],
-                                         function=get_idx),
-                           name='func_get_idx')
+    func_get_idx = pe.Node(
+        util.Function(
+            input_names=["in_files", "stop_idx", "start_idx"],
+            output_names=["stopidx", "startidx"],
+            function=get_idx,
+        ),
+        name="func_get_idx",
+    )
 
     # wire in the func_get_idx node
-    preproc.connect(inputNode, 'func',
-                    func_get_idx, 'in_files')
-    preproc.connect(inputNode, 'start_idx',
-                    func_get_idx, 'start_idx')
-    preproc.connect(inputNode, 'stop_idx',
-                    func_get_idx, 'stop_idx')
+    preproc.connect(inputNode, "func", func_get_idx, "in_files")
+    preproc.connect(inputNode, "start_idx", func_get_idx, "start_idx")
+    preproc.connect(inputNode, "stop_idx", func_get_idx, "stop_idx")
 
     # allocate a node to edit the functional file
-    func_drop_trs = pe.Node(interface=afni_utils.Calc(),
-                            name='func_drop_trs',
-                            mem_gb=0.37,
-                            mem_x=(739971956005215 / 151115727451828646838272,
-                                   'in_file_a'))
+    func_drop_trs = pe.Node(
+        interface=afni_utils.Calc(),
+        name="func_drop_trs",
+        mem_gb=0.37,
+        mem_x=(739971956005215 / 151115727451828646838272, "in_file_a"),
+    )
 
-    func_drop_trs.inputs.expr = 'a'
-    func_drop_trs.inputs.outputtype = 'NIFTI_GZ'
+    func_drop_trs.inputs.expr = "a"
+    func_drop_trs.inputs.outputtype = "NIFTI_GZ"
 
     # wire in the inputs
-    preproc.connect(inputNode, 'func',
-                    func_drop_trs, 'in_file_a')
+    preproc.connect(inputNode, "func", func_drop_trs, "in_file_a")
 
-    preproc.connect(func_get_idx, 'startidx',
-                    func_drop_trs, 'start_idx')
+    preproc.connect(func_get_idx,
"startidx", func_drop_trs, "start_idx") - preproc.connect(func_get_idx, 'stopidx', - func_drop_trs, 'stop_idx') + preproc.connect(func_get_idx, "stopidx", func_drop_trs, "stop_idx") # wire the output - preproc.connect(func_drop_trs, 'out_file', - outputNode, 'edited_func') + preproc.connect(func_drop_trs, "out_file", outputNode, "edited_func") return preproc -def slice_timing_wf(name='slice_timing', tpattern=None, tzero=None): +def slice_timing_wf(name="slice_timing", tpattern=None, tzero=None): # allocate a workflow object wf = pe.Workflow(name=name) # configure the workflow's input spec - inputNode = pe.Node(util.IdentityInterface(fields=['func_ts', - 'tr', - 'tpattern']), - name='inputspec') + inputNode = pe.Node( + util.IdentityInterface(fields=["func_ts", "tr", "tpattern"]), name="inputspec" + ) # configure the workflow's output spec outputNode = pe.Node( - util.IdentityInterface(fields=['slice_time_corrected']), - name='outputspec') + util.IdentityInterface(fields=["slice_time_corrected"]), name="outputspec" + ) # create TShift AFNI node - func_slice_timing_correction = pe.Node(interface=preprocess.TShift(), - name='slice_timing', - mem_gb=0.45, - mem_x=(5247073869855161 / - 604462909807314587353088, - 'in_file')) - func_slice_timing_correction.inputs.outputtype = 'NIFTI_GZ' + func_slice_timing_correction = pe.Node( + interface=preprocess.TShift(), + name="slice_timing", + mem_gb=0.45, + mem_x=(5247073869855161 / 604462909807314587353088, "in_file"), + ) + func_slice_timing_correction.inputs.outputtype = "NIFTI_GZ" if tzero is not None: func_slice_timing_correction.inputs.tzero = tzero - wf.connect([ - ( - inputNode, - func_slice_timing_correction, - [ - ( - 'func_ts', - 'in_file' - ), - # ( - # # add the @ prefix to the tpattern file going into - # # AFNI 3dTshift - needed this so the tpattern file - # # output from get_scan_params would be tied downstream - # # via a connection (to avoid poofing) - # ('tpattern', nullify, add_afni_prefix), - # 'tpattern' - # ), - ( - ('tr', nullify), - 'tr' - ), - ] - ), - ]) + wf.connect( + [ + ( + inputNode, + func_slice_timing_correction, + [ + ("func_ts", "in_file"), + # ( + # # add the @ prefix to the tpattern file going into + # # AFNI 3dTshift - needed this so the tpattern file + # # output from get_scan_params would be tied downstream + # # via a connection (to avoid poofing) + # ('tpattern', nullify, add_afni_prefix), + # 'tpattern' + # ), + (("tr", nullify), "tr"), + ], + ), + ] + ) if tpattern is not None: func_slice_timing_correction.inputs.tpattern = tpattern else: - wf.connect(inputNode, ('tpattern', nullify, add_afni_prefix), - func_slice_timing_correction, 'tpattern') + wf.connect( + inputNode, + ("tpattern", nullify, add_afni_prefix), + func_slice_timing_correction, + "tpattern", + ) - wf.connect(func_slice_timing_correction, 'out_file', - outputNode, 'slice_time_corrected') + wf.connect( + func_slice_timing_correction, "out_file", outputNode, "slice_time_corrected" + ) return wf @@ -485,7 +471,6 @@ def get_idx(in_files, stop_idx=None, start_idx=None): Value of last slice to consider for the functional run """ - # Import packages from nibabel import load @@ -496,12 +481,11 @@ def get_idx(in_files, stop_idx=None, start_idx=None): # Check to make sure the input file is 4-dimensional if len(shape) != 4: - raise TypeError('Input nifti file: %s is not a 4D file' % in_files) + raise TypeError("Input nifti file: %s is not a 4D file" % in_files) # Grab the number of volumes nvols = int(hdr.get_data_shape()[3]) - if (start_idx == None) or 
(int(start_idx) < 0) or ( - int(start_idx) > (nvols - 1)): + if (start_idx == None) or (int(start_idx) < 0) or (int(start_idx) > (nvols - 1)): startidx = 0 else: startidx = int(start_idx) @@ -515,76 +499,75 @@ def get_idx(in_files, stop_idx=None, start_idx=None): @nodeblock( - name='func_reorient', - config=['functional_preproc', 'update_header'], - switch=['run'], - inputs=['bold'], - outputs=['desc-preproc_bold', 'desc-reorient_bold'] + name="func_reorient", + config=["functional_preproc", "update_header"], + switch=["run"], + inputs=["bold"], + outputs=["desc-preproc_bold", "desc-reorient_bold"], ) def func_reorient(wf, cfg, strat_pool, pipe_num, opt=None): - - func_deoblique = pe.Node(interface=afni_utils.Refit(), - name=f'func_deoblique_{pipe_num}', - mem_gb=0.68, - mem_x=(4664065662093477 / - 1208925819614629174706176, - 'in_file')) + func_deoblique = pe.Node( + interface=afni_utils.Refit(), + name=f"func_deoblique_{pipe_num}", + mem_gb=0.68, + mem_x=(4664065662093477 / 1208925819614629174706176, "in_file"), + ) func_deoblique.inputs.deoblique = True - node, out = strat_pool.get_data('bold') - wf.connect(node, out, func_deoblique, 'in_file') + node, out = strat_pool.get_data("bold") + wf.connect(node, out, func_deoblique, "in_file") - func_reorient = pe.Node(interface=afni_utils.Resample(), - name=f'func_reorient_{pipe_num}', - mem_gb=0, - mem_x=(0.0115, 'in_file', 't')) + func_reorient = pe.Node( + interface=afni_utils.Resample(), + name=f"func_reorient_{pipe_num}", + mem_gb=0, + mem_x=(0.0115, "in_file", "t"), + ) - func_reorient.inputs.orientation = 'RPI' - func_reorient.inputs.outputtype = 'NIFTI_GZ' + func_reorient.inputs.orientation = "RPI" + func_reorient.inputs.outputtype = "NIFTI_GZ" - wf.connect(func_deoblique, 'out_file', func_reorient, 'in_file') + wf.connect(func_deoblique, "out_file", func_reorient, "in_file") outputs = { - 'desc-preproc_bold': (func_reorient, 'out_file'), - 'desc-reorient_bold': (func_reorient, 'out_file') + "desc-preproc_bold": (func_reorient, "out_file"), + "desc-reorient_bold": (func_reorient, "out_file"), } return (wf, outputs) @nodeblock( - name='func_scaling', - config=['functional_preproc', 'scaling'], - switch=['run'], - inputs=['desc-preproc_bold'], - outputs=['desc-preproc_bold'] + name="func_scaling", + config=["functional_preproc", "scaling"], + switch=["run"], + inputs=["desc-preproc_bold"], + outputs=["desc-preproc_bold"], ) def func_scaling(wf, cfg, strat_pool, pipe_num, opt=None): - scale_func_wf = create_scale_func_wf( - scaling_factor=cfg.scaling_factor, - wf_name=f"scale_func_{pipe_num}" + scaling_factor=cfg.scaling_factor, wf_name=f"scale_func_{pipe_num}" ) node, out = strat_pool.get_data("desc-preproc_bold") - wf.connect(node, out, scale_func_wf, 'inputspec.func') + wf.connect(node, out, scale_func_wf, "inputspec.func") - outputs = { - 'desc-preproc_bold': (scale_func_wf, 'outputspec.scaled_func') - } + outputs = {"desc-preproc_bold": (scale_func_wf, "outputspec.scaled_func")} return (wf, outputs) @nodeblock( - name='func_truncate', - config=['functional_preproc', 'truncation'], - inputs=['desc-preproc_bold'], - outputs={'desc-preproc_bold': { - 'Description': 'Truncated functional time-series BOLD data.'}} + name="func_truncate", + config=["functional_preproc", "truncation"], + inputs=["desc-preproc_bold"], + outputs={ + "desc-preproc_bold": { + "Description": "Truncated functional time-series BOLD data." 
+ } + }, ) def func_truncate(wf, cfg, strat_pool, pipe_num, opt=None): - # if cfg.functional_preproc['truncation']['start_tr'] == 0 and \ # cfg.functional_preproc['truncation']['stop_tr'] == None: # data, key = strat_pool.get_data("desc-preproc_bold", @@ -592,333 +575,375 @@ def func_truncate(wf, cfg, strat_pool, pipe_num, opt=None): # outputs = {key: data} # return (wf, outputs) - trunc_wf = create_wf_edit_func( - wf_name=f"edit_func_{pipe_num}" - ) - trunc_wf.inputs.inputspec.start_idx = cfg.functional_preproc[ - 'truncation']['start_tr'] - trunc_wf.inputs.inputspec.stop_idx = cfg.functional_preproc['truncation'][ - 'stop_tr'] + trunc_wf = create_wf_edit_func(wf_name=f"edit_func_{pipe_num}") + trunc_wf.inputs.inputspec.start_idx = cfg.functional_preproc["truncation"][ + "start_tr" + ] + trunc_wf.inputs.inputspec.stop_idx = cfg.functional_preproc["truncation"]["stop_tr"] node, out = strat_pool.get_data("desc-preproc_bold") - wf.connect(node, out, trunc_wf, 'inputspec.func') + wf.connect(node, out, trunc_wf, "inputspec.func") - outputs = { - 'desc-preproc_bold': (trunc_wf, 'outputspec.edited_func') - } + outputs = {"desc-preproc_bold": (trunc_wf, "outputspec.edited_func")} return (wf, outputs) @nodeblock( - name='func_despike', - config=['functional_preproc', 'despiking'], - switch=['run'], - option_key=['space'], - option_val=['native'], - inputs=['desc-preproc_bold'], - outputs={'desc-preproc_bold': { - 'Description': 'De-spiked BOLD time-series via AFNI 3dDespike.'}} + name="func_despike", + config=["functional_preproc", "despiking"], + switch=["run"], + option_key=["space"], + option_val=["native"], + inputs=["desc-preproc_bold"], + outputs={ + "desc-preproc_bold": { + "Description": "De-spiked BOLD time-series via AFNI 3dDespike." + } + }, ) def func_despike(wf, cfg, strat_pool, pipe_num, opt=None): - - despike = pe.Node(interface=preprocess.Despike(), - name=f'func_despiked_{pipe_num}', - mem_gb=0.66, - mem_x=(8251808479088459 / 1208925819614629174706176, - 'in_file')) - despike.inputs.outputtype = 'NIFTI_GZ' + despike = pe.Node( + interface=preprocess.Despike(), + name=f"func_despiked_{pipe_num}", + mem_gb=0.66, + mem_x=(8251808479088459 / 1208925819614629174706176, "in_file"), + ) + despike.inputs.outputtype = "NIFTI_GZ" node, out = strat_pool.get_data("desc-preproc_bold") - wf.connect(node, out, despike, 'in_file') + wf.connect(node, out, despike, "in_file") - outputs = { - 'desc-preproc_bold': (despike, 'out_file') - } + outputs = {"desc-preproc_bold": (despike, "out_file")} return (wf, outputs) @nodeblock( - name='func_despike_template', - config=['functional_preproc', 'despiking'], - switch=['run'], - option_key=['space'], - option_val=['template'], - inputs=[('space-template_desc-preproc_bold', - 'space-template_res-derivative_desc-preproc_bold'), - 'T1w-template-funcreg', 'T1w-template-deriv'], - outputs={'space-template_desc-preproc_bold': { - 'Description': 'De-spiked BOLD time-series via AFNI 3dDespike.', - 'Template': 'T1w-template-funcreg'}, - 'space-template_res-derivative_desc-preproc_bold': { - 'Description': 'De-spiked BOLD time-series via AFNI 3dDespike.', - 'Template': 'T1w-template-deriv'}} + name="func_despike_template", + config=["functional_preproc", "despiking"], + switch=["run"], + option_key=["space"], + option_val=["template"], + inputs=[ + ( + "space-template_desc-preproc_bold", + "space-template_res-derivative_desc-preproc_bold", + ), + "T1w-template-funcreg", + "T1w-template-deriv", + ], + outputs={ + "space-template_desc-preproc_bold": { + 
"Description": "De-spiked BOLD time-series via AFNI 3dDespike.", + "Template": "T1w-template-funcreg", + }, + "space-template_res-derivative_desc-preproc_bold": { + "Description": "De-spiked BOLD time-series via AFNI 3dDespike.", + "Template": "T1w-template-deriv", + }, + }, ) def func_despike_template(wf, cfg, strat_pool, pipe_num, opt=None): - - despike = pe.Node(interface=preprocess.Despike(), - name=f'func_despiked_template_{pipe_num}', - mem_gb=0.66, - mem_x=(8251808479088459 / 1208925819614629174706176, - 'in_file')) - despike.inputs.outputtype = 'NIFTI_GZ' + despike = pe.Node( + interface=preprocess.Despike(), + name=f"func_despiked_template_{pipe_num}", + mem_gb=0.66, + mem_x=(8251808479088459 / 1208925819614629174706176, "in_file"), + ) + despike.inputs.outputtype = "NIFTI_GZ" node, out = strat_pool.get_data("space-template_desc-preproc_bold") - wf.connect(node, out, despike, 'in_file') + wf.connect(node, out, despike, "in_file") - outputs = { - 'space-template_desc-preproc_bold': (despike, 'out_file') - } - - if strat_pool.get_data("space-template_res-derivative_desc-preproc_bold"): - despike_funcderiv = pe.Node(interface=preprocess.Despike(), - name=f'func_deriv_despiked_template_{pipe_num}', - mem_gb=0.66, - mem_x=(8251808479088459 / 1208925819614629174706176, - 'in_file')) - despike_funcderiv.inputs.outputtype = 'NIFTI_GZ' + outputs = {"space-template_desc-preproc_bold": (despike, "out_file")} - node, out = strat_pool.get_data("space-template_res-derivative_desc-preproc_bold") - wf.connect(node, out, despike_funcderiv, 'in_file') - - outputs.update({ - 'space-template_res-derivative_desc-preproc_bold': - (despike_funcderiv, 'out_file')}) + if strat_pool.get_data("space-template_res-derivative_desc-preproc_bold"): + despike_funcderiv = pe.Node( + interface=preprocess.Despike(), + name=f"func_deriv_despiked_template_{pipe_num}", + mem_gb=0.66, + mem_x=(8251808479088459 / 1208925819614629174706176, "in_file"), + ) + despike_funcderiv.inputs.outputtype = "NIFTI_GZ" + + node, out = strat_pool.get_data( + "space-template_res-derivative_desc-preproc_bold" + ) + wf.connect(node, out, despike_funcderiv, "in_file") + + outputs.update( + { + "space-template_res-derivative_desc-preproc_bold": ( + despike_funcderiv, + "out_file", + ) + } + ) return (wf, outputs) @nodeblock( - name='func_slice_time', - config=['functional_preproc', 'slice_timing_correction'], - switch=['run'], - inputs=['desc-preproc_bold', 'TR', 'tpattern'], - outputs={'desc-preproc_bold': { - 'Description': 'Slice-time corrected BOLD time-series via AFNI 3dTShift.'}, - 'desc-stc_bold': { - 'Description': 'Slice-time corrected BOLD time-series via AFNI 3dTShift.'}} + name="func_slice_time", + config=["functional_preproc", "slice_timing_correction"], + switch=["run"], + inputs=["desc-preproc_bold", "TR", "tpattern"], + outputs={ + "desc-preproc_bold": { + "Description": "Slice-time corrected BOLD time-series via AFNI 3dTShift." + }, + "desc-stc_bold": { + "Description": "Slice-time corrected BOLD time-series via AFNI 3dTShift." 
+ }, + }, ) def func_slice_time(wf, cfg, strat_pool, pipe_num, opt=None): - - slice_time = slice_timing_wf(name='func_slice_timing_correction_' - f'{pipe_num}', - tpattern=cfg.functional_preproc[ - 'slice_timing_correction']['tpattern'], - tzero=cfg.functional_preproc[ - 'slice_timing_correction']['tzero']) + slice_time = slice_timing_wf( + name="func_slice_timing_correction_" f"{pipe_num}", + tpattern=cfg.functional_preproc["slice_timing_correction"]["tpattern"], + tzero=cfg.functional_preproc["slice_timing_correction"]["tzero"], + ) node, out = strat_pool.get_data("desc-preproc_bold") - wf.connect(node, out, slice_time, 'inputspec.func_ts') + wf.connect(node, out, slice_time, "inputspec.func_ts") - node, out = strat_pool.get_data('TR') - wf.connect(node, out, slice_time, 'inputspec.tr') + node, out = strat_pool.get_data("TR") + wf.connect(node, out, slice_time, "inputspec.tr") - node, out = strat_pool.get_data('tpattern') - wf.connect(node, out, slice_time, 'inputspec.tpattern') + node, out = strat_pool.get_data("tpattern") + wf.connect(node, out, slice_time, "inputspec.tpattern") outputs = { - 'desc-preproc_bold': (slice_time, 'outputspec.slice_time_corrected'), - 'desc-stc_bold': (slice_time, 'outputspec.slice_time_corrected') + "desc-preproc_bold": (slice_time, "outputspec.slice_time_corrected"), + "desc-stc_bold": (slice_time, "outputspec.slice_time_corrected"), } return (wf, outputs) @nodeblock( - name='bold_mask_afni', - switch=[['functional_preproc', 'run'], - ['functional_preproc', 'func_masking', 'run']], - option_key=['functional_preproc', 'func_masking', 'using'], - option_val='AFNI', - inputs=['desc-preproc_bold'], - outputs={'space-bold_desc-brain_mask': - {'Description': 'Binary brain mask of the BOLD functional time-series created by AFNI 3dAutomask.'}} + name="bold_mask_afni", + switch=[ + ["functional_preproc", "run"], + ["functional_preproc", "func_masking", "run"], + ], + option_key=["functional_preproc", "func_masking", "using"], + option_val="AFNI", + inputs=["desc-preproc_bold"], + outputs={ + "space-bold_desc-brain_mask": { + "Description": "Binary brain mask of the BOLD functional time-series created by AFNI 3dAutomask." 
+ } + }, ) def bold_mask_afni(wf, cfg, strat_pool, pipe_num, opt=None): - - func_get_brain_mask = pe.Node(interface=preprocess.Automask(), - name=f'func_get_brain_mask_AFNI_{pipe_num}') - func_get_brain_mask.inputs.outputtype = 'NIFTI_GZ' + func_get_brain_mask = pe.Node( + interface=preprocess.Automask(), name=f"func_get_brain_mask_AFNI_{pipe_num}" + ) + func_get_brain_mask.inputs.outputtype = "NIFTI_GZ" node, out = strat_pool.get_data("desc-preproc_bold") - wf.connect(node, out, func_get_brain_mask, 'in_file') + wf.connect(node, out, func_get_brain_mask, "in_file") - outputs = { - 'space-bold_desc-brain_mask': (func_get_brain_mask, 'out_file') - } + outputs = {"space-bold_desc-brain_mask": (func_get_brain_mask, "out_file")} return (wf, outputs) @nodeblock( - name='bold_mask_fsl', - switch=[['functional_preproc', 'run'], - ['functional_preproc', 'func_masking', 'run']], - option_key=['functional_preproc', 'func_masking', 'using'], - option_val='FSL', - inputs=['desc-preproc_bold'], - outputs=['space-bold_desc-brain_mask'] + name="bold_mask_fsl", + switch=[ + ["functional_preproc", "run"], + ["functional_preproc", "func_masking", "run"], + ], + option_key=["functional_preproc", "func_masking", "using"], + option_val="FSL", + inputs=["desc-preproc_bold"], + outputs=["space-bold_desc-brain_mask"], ) def bold_mask_fsl(wf, cfg, strat_pool, pipe_num, opt=None): - inputnode_bet = pe.Node( - util.IdentityInterface(fields=['frac', - 'mesh_boolean', - 'outline', - 'padding', - 'radius', - 'reduce_bias', - 'remove_eyes', - 'robust', - 'skull', - 'surfaces', - 'threshold', - 'vertical_gradient']), - name=f'BET_options_{pipe_num}') - - func_get_brain_mask = pe.Node(interface=fsl.BET(), - name=f'func_get_brain_mask_BET_{pipe_num}') - func_get_brain_mask.inputs.output_type = 'NIFTI_GZ' + util.IdentityInterface( + fields=[ + "frac", + "mesh_boolean", + "outline", + "padding", + "radius", + "reduce_bias", + "remove_eyes", + "robust", + "skull", + "surfaces", + "threshold", + "vertical_gradient", + ] + ), + name=f"BET_options_{pipe_num}", + ) + + func_get_brain_mask = pe.Node( + interface=fsl.BET(), name=f"func_get_brain_mask_BET_{pipe_num}" + ) + func_get_brain_mask.inputs.output_type = "NIFTI_GZ" func_get_brain_mask.inputs.mask = True inputnode_bet.inputs.set( - frac=cfg.functional_preproc['func_masking']['FSL-BET']['frac'], - mesh_boolean=cfg.functional_preproc['func_masking']['FSL-BET'][ - 'mesh_boolean'], - outline=cfg.functional_preproc['func_masking']['FSL-BET'][ - 'outline'], - padding=cfg.functional_preproc['func_masking']['FSL-BET'][ - 'padding'], - radius=cfg.functional_preproc['func_masking']['FSL-BET']['radius'], - reduce_bias=cfg.functional_preproc['func_masking']['FSL-BET'][ - 'reduce_bias'], - remove_eyes=cfg.functional_preproc['func_masking']['FSL-BET'][ - 'remove_eyes'], - robust=cfg.functional_preproc['func_masking']['FSL-BET']['robust'], - skull=cfg.functional_preproc['func_masking']['FSL-BET']['skull'], - surfaces=cfg.functional_preproc['func_masking']['FSL-BET'][ - 'surfaces'], - threshold=cfg.functional_preproc['func_masking']['FSL-BET'][ - 'threshold'], - vertical_gradient= - cfg.functional_preproc['func_masking']['FSL-BET'][ - 'vertical_gradient'], - ) - - wf.connect([ - (inputnode_bet, func_get_brain_mask, [ - ('frac', 'frac'), - ('mesh_boolean', 'mesh'), - ('outline', 'outline'), - ('padding', 'padding'), - ('radius', 'radius'), - ('reduce_bias', 'reduce_bias'), - ('remove_eyes', 'remove_eyes'), - ('robust', 'robust'), - ('skull', 'skull'), - ('surfaces', 'surfaces'), - 
('threshold', 'threshold'), - ('vertical_gradient', 'vertical_gradient'), - ]) - ]) - - if cfg.functional_preproc['func_masking']['FSL-BET'][ - 'functional_mean_boolean']: - func_skull_mean = pe.Node(interface=afni_utils.TStat(), - name=f'func_mean_skull_{pipe_num}') - func_skull_mean.inputs.options = '-mean' - func_skull_mean.inputs.outputtype = 'NIFTI_GZ' + frac=cfg.functional_preproc["func_masking"]["FSL-BET"]["frac"], + mesh_boolean=cfg.functional_preproc["func_masking"]["FSL-BET"]["mesh_boolean"], + outline=cfg.functional_preproc["func_masking"]["FSL-BET"]["outline"], + padding=cfg.functional_preproc["func_masking"]["FSL-BET"]["padding"], + radius=cfg.functional_preproc["func_masking"]["FSL-BET"]["radius"], + reduce_bias=cfg.functional_preproc["func_masking"]["FSL-BET"]["reduce_bias"], + remove_eyes=cfg.functional_preproc["func_masking"]["FSL-BET"]["remove_eyes"], + robust=cfg.functional_preproc["func_masking"]["FSL-BET"]["robust"], + skull=cfg.functional_preproc["func_masking"]["FSL-BET"]["skull"], + surfaces=cfg.functional_preproc["func_masking"]["FSL-BET"]["surfaces"], + threshold=cfg.functional_preproc["func_masking"]["FSL-BET"]["threshold"], + vertical_gradient=cfg.functional_preproc["func_masking"]["FSL-BET"][ + "vertical_gradient" + ], + ) + + wf.connect( + [ + ( + inputnode_bet, + func_get_brain_mask, + [ + ("frac", "frac"), + ("mesh_boolean", "mesh"), + ("outline", "outline"), + ("padding", "padding"), + ("radius", "radius"), + ("reduce_bias", "reduce_bias"), + ("remove_eyes", "remove_eyes"), + ("robust", "robust"), + ("skull", "skull"), + ("surfaces", "surfaces"), + ("threshold", "threshold"), + ("vertical_gradient", "vertical_gradient"), + ], + ) + ] + ) + + if cfg.functional_preproc["func_masking"]["FSL-BET"]["functional_mean_boolean"]: + func_skull_mean = pe.Node( + interface=afni_utils.TStat(), name=f"func_mean_skull_{pipe_num}" + ) + func_skull_mean.inputs.options = "-mean" + func_skull_mean.inputs.outputtype = "NIFTI_GZ" node, out = strat_pool.get_data("desc-preproc_bold") - wf.connect(node, out, func_skull_mean, 'in_file') + wf.connect(node, out, func_skull_mean, "in_file") - out_node, out_file = (func_skull_mean, 'out_file') + out_node, out_file = (func_skull_mean, "out_file") - if cfg.functional_preproc['func_masking']['FSL-BET'][ - 'functional_mean_thr']['run']: + if cfg.functional_preproc["func_masking"]["FSL-BET"]["functional_mean_thr"][ + "run" + ]: # T=$(fslstats ${subject}_tmean.nii.gz -p 98) - threshold_T = pe.Node(interface=fsl.ImageStats(), - name=f'func_mean_skull_thr_value_{pipe_num}', - iterfield=['in_file']) - threshold_T.inputs.op_string = "-p %f " % (cfg.functional_preproc['func_masking']['FSL-BET']['functional_mean_thr']['threshold_value']) - - wf.connect(func_skull_mean, 'out_file', threshold_T, 'in_file') + threshold_T = pe.Node( + interface=fsl.ImageStats(), + name=f"func_mean_skull_thr_value_{pipe_num}", + iterfield=["in_file"], + ) + threshold_T.inputs.op_string = ( + "-p %f " + % ( + cfg.functional_preproc["func_masking"]["FSL-BET"][ + "functional_mean_thr" + ]["threshold_value"] + ) + ) + + wf.connect(func_skull_mean, "out_file", threshold_T, "in_file") # z=$(echo "$T / 10" | bc -l) def form_thr_string(thr): - threshold_z = str(float(thr/10)) - return '-thr %s' % (threshold_z) - - form_thr_string = pe.Node(util.Function(input_names=['thr'], - output_names=['out_str'], - function=form_thr_string), - name=f'form_thr_string_{pipe_num}') + threshold_z = str(float(thr / 10)) + return "-thr %s" % (threshold_z) + + form_thr_string = pe.Node( + 
util.Function(
+                input_names=["thr"],
+                output_names=["out_str"],
+                function=form_thr_string,
+            ),
+            name=f"form_thr_string_{pipe_num}",
+        )
 
-            wf.connect(threshold_T, 'out_stat', form_thr_string, 'thr')
+            wf.connect(threshold_T, "out_stat", form_thr_string, "thr")
 
             # fslmaths ${subject}_tmean.nii.gz -thr ${z} ${subject}_tmean_thr.nii.gz
-            func_skull_mean_thr = pe.Node(interface=fsl.ImageMaths(),
-                                          name=f'func_mean_skull_thr_{pipe_num}')
-
-            wf.connect(func_skull_mean, 'out_file', func_skull_mean_thr, 'in_file')
-            wf.connect(form_thr_string, 'out_str', func_skull_mean_thr, 'op_string')
+            func_skull_mean_thr = pe.Node(
+                interface=fsl.ImageMaths(), name=f"func_mean_skull_thr_{pipe_num}"
+            )
 
-            out_node, out_file = (func_skull_mean_thr, 'out_file')
+            wf.connect(func_skull_mean, "out_file", func_skull_mean_thr, "in_file")
+            wf.connect(form_thr_string, "out_str", func_skull_mean_thr, "op_string")
 
-        if cfg.functional_preproc['func_masking']['FSL-BET'][
-                'functional_mean_bias_correction']:
+            out_node, out_file = (func_skull_mean_thr, "out_file")
 
+        if cfg.functional_preproc["func_masking"]["FSL-BET"][
+            "functional_mean_bias_correction"
+        ]:
             # fast --nopve -B ${subject}_tmean_thr.nii.gz
-            func_mean_skull_fast = pe.Node(interface=fsl.FAST(),
-                                           name=f'func_mean_skull_fast_{pipe_num}')
+            func_mean_skull_fast = pe.Node(
+                interface=fsl.FAST(), name=f"func_mean_skull_fast_{pipe_num}"
+            )
             func_mean_skull_fast.inputs.no_pve = True
             func_mean_skull_fast.inputs.output_biascorrected = True
 
-            wf.connect(out_node, out_file, func_mean_skull_fast, 'in_files')
+            wf.connect(out_node, out_file, func_mean_skull_fast, "in_files")
 
-            out_node, out_file = (func_mean_skull_fast, 'restored_image')
+            out_node, out_file = (func_mean_skull_fast, "restored_image")
 
-        wf.connect(out_node, out_file, func_get_brain_mask, 'in_file')
+        wf.connect(out_node, out_file, func_get_brain_mask, "in_file")
 
     else:
         func_get_brain_mask.inputs.functional = True
 
         node, out = strat_pool.get_data("desc-preproc_bold")
-        wf.connect(node, out, func_get_brain_mask, 'in_file')
+        wf.connect(node, out, func_get_brain_mask, "in_file")
 
     # erode one voxel of functional brain mask
-    erode_one_voxel = pe.Node(interface=fsl.ErodeImage(),
-                              name=f'erode_one_voxel_{pipe_num}')
+    erode_one_voxel = pe.Node(
+        interface=fsl.ErodeImage(), name=f"erode_one_voxel_{pipe_num}"
+    )
 
-    erode_one_voxel.inputs.kernel_shape = 'box'
+    erode_one_voxel.inputs.kernel_shape = "box"
     erode_one_voxel.inputs.kernel_size = 1.0
 
-    wf.connect(func_get_brain_mask, 'mask_file',
-               erode_one_voxel, 'in_file')
+    wf.connect(func_get_brain_mask, "mask_file", erode_one_voxel, "in_file")
 
-    outputs = {
-        'space-bold_desc-brain_mask': (erode_one_voxel, 'out_file')
-    }
+    outputs = {"space-bold_desc-brain_mask": (erode_one_voxel, "out_file")}
 
     return (wf, outputs)
 
 
 @nodeblock(
-    name='bold_mask_fsl_afni',
-    switch=[['functional_preproc', 'run'],
-            ['functional_preproc', 'func_masking', 'run']],
-    option_key=['functional_preproc', 'func_masking', 'using'],
-    option_val='FSL_AFNI',
-    inputs=[('motion-basefile', 'desc-preproc_bold'), 'FSL-AFNI-bold-ref', 'FSL-AFNI-brain-mask',
-            'FSL-AFNI-brain-probseg'],
-    outputs=['space-bold_desc-brain_mask', 'desc-ref_bold']
+    name="bold_mask_fsl_afni",
+    switch=[
+        ["functional_preproc", "run"],
+        ["functional_preproc", "func_masking", "run"],
+    ],
+    option_key=["functional_preproc", "func_masking", "using"],
+    option_val="FSL_AFNI",
+    inputs=[
+        ("motion-basefile", "desc-preproc_bold"),
+        "FSL-AFNI-bold-ref",
+        "FSL-AFNI-brain-mask",
+        "FSL-AFNI-brain-probseg",
+    ],
outputs=["space-bold_desc-brain_mask", "desc-ref_bold"], ) def bold_mask_fsl_afni(wf, cfg, strat_pool, pipe_num, opt=None): """fMRIPrep-style BOLD mask `Ref `_ """ - # Initialize transforms with antsAI init_aff = pe.Node( AI( @@ -930,13 +955,13 @@ def bold_mask_fsl_afni(wf, cfg, strat_pool, pipe_num, opt=None): verbose=True, ), name=f"init_aff_{pipe_num}", - n_procs=cfg.pipeline_setup['system_config']['num_OMP_threads'], + n_procs=cfg.pipeline_setup["system_config"]["num_OMP_threads"], ) - node, out = strat_pool.get_data('FSL-AFNI-bold-ref') - wf.connect(node, out, init_aff, 'fixed_image') + node, out = strat_pool.get_data("FSL-AFNI-bold-ref") + wf.connect(node, out, init_aff, "fixed_image") - node, out = strat_pool.get_data('FSL-AFNI-brain-mask') - wf.connect(node, out, init_aff, 'fixed_image_mask') + node, out = strat_pool.get_data("FSL-AFNI-brain-mask") + wf.connect(node, out, init_aff, "fixed_image_mask") init_aff.inputs.search_grid = (40, (0, 40, 40)) @@ -946,27 +971,27 @@ def bold_mask_fsl_afni(wf, cfg, strat_pool, pipe_num, opt=None): winsorize_upper_quantile=0.98, winsorize_lower_quantile=0.05, float=True, - metric=['Mattes'], + metric=["Mattes"], metric_weight=[1], radius_or_number_of_bins=[64], - transforms=['Affine'], + transforms=["Affine"], transform_parameters=[[0.1]], number_of_iterations=[[200]], convergence_window_size=[10], - convergence_threshold=[1.e-9], - sampling_strategy=['Random', 'Random'], + convergence_threshold=[1.0e-9], + sampling_strategy=["Random", "Random"], smoothing_sigmas=[[2]], - sigma_units=['mm', 'mm', 'mm'], + sigma_units=["mm", "mm", "mm"], shrink_factors=[[2]], sampling_percentage=[0.2], - use_histogram_matching=[True] + use_histogram_matching=[True], ), name=f"norm_{pipe_num}", - n_procs=cfg.pipeline_setup['system_config']['num_OMP_threads'], + n_procs=cfg.pipeline_setup["system_config"]["num_OMP_threads"], ) - node, out = strat_pool.get_data('FSL-AFNI-bold-ref') - wf.connect(node, out, norm, 'fixed_image') + node, out = strat_pool.get_data("FSL-AFNI-bold-ref") + wf.connect(node, out, norm, "fixed_image") map_brainmask = pe.Node( ants.ApplyTransforms( @@ -977,12 +1002,13 @@ def bold_mask_fsl_afni(wf, cfg, strat_pool, pipe_num, opt=None): ) # Use the higher resolution and probseg for numerical stability in rounding - node, out = strat_pool.get_data('FSL-AFNI-brain-probseg') - wf.connect(node, out, map_brainmask, 'input_image') + node, out = strat_pool.get_data("FSL-AFNI-brain-probseg") + wf.connect(node, out, map_brainmask, "input_image") - binarize_mask = pe.Node(interface=fsl.maths.MathsCommand(), - name=f'binarize_mask_{pipe_num}') - binarize_mask.inputs.args = '-thr 0.85 -bin' + binarize_mask = pe.Node( + interface=fsl.maths.MathsCommand(), name=f"binarize_mask_{pipe_num}" + ) + binarize_mask.inputs.args = "-thr 0.85 -bin" # Dilate pre_mask pre_dilate = pe.Node( @@ -997,10 +1023,10 @@ def bold_mask_fsl_afni(wf, cfg, strat_pool, pipe_num, opt=None): # Fix precision errors # https://github.com/ANTsX/ANTs/wiki/Inputs-do-not-occupy-the-same-physical-space#fixing-precision-errors - print_header = pe.Node(PrintHeader(what_information=4), - name=f'print_header_{pipe_num}') - set_direction = pe.Node(SetDirectionByMatrix(), - name=f'set_direction_{pipe_num}') + print_header = pe.Node( + PrintHeader(what_information=4), name=f"print_header_{pipe_num}" + ) + set_direction = pe.Node(SetDirectionByMatrix(), name=f"set_direction_{pipe_num}") # Run N4 normally, force num_threads=1 for stability (images are # small, no need for >1) @@ -1009,634 +1035,742 @@ def 
bold_mask_fsl_afni(wf, cfg, strat_pool, pipe_num, opt=None): dimension=3, copy_header=True, bspline_fitting_distance=200 ), shrink_factor=2, - rescale_intensities = True, + rescale_intensities=True, name=f"n4_correct_{pipe_num}", n_procs=1, ) skullstrip_first_pass = pe.Node( fsl.BET(frac=0.2, mask=True, functional=False), - name=f'skullstrip_first_pass_{pipe_num}') + name=f"skullstrip_first_pass_{pipe_num}", + ) bet_dilate = pe.Node( - fsl.DilateImage(operation='max', kernel_shape='sphere', - kernel_size=6.0, internal_datatype='char'), - name=f'skullstrip_first_dilate_{pipe_num}') + fsl.DilateImage( + operation="max", + kernel_shape="sphere", + kernel_size=6.0, + internal_datatype="char", + ), + name=f"skullstrip_first_dilate_{pipe_num}", + ) - bet_mask = pe.Node(fsl.ApplyMask(), name=f'skullstrip_first_mask_' - f'{pipe_num}') + bet_mask = pe.Node(fsl.ApplyMask(), name=f"skullstrip_first_mask_" f"{pipe_num}") - unifize = pe.Node(afni_utils.Unifize(t2=True, outputtype='NIFTI_GZ', - args='-clfrac 0.2 -rbt 18.3 65.0 90.0', - out_file="uni.nii.gz"), - name=f'unifize_{pipe_num}') + unifize = pe.Node( + afni_utils.Unifize( + t2=True, + outputtype="NIFTI_GZ", + args="-clfrac 0.2 -rbt 18.3 65.0 90.0", + out_file="uni.nii.gz", + ), + name=f"unifize_{pipe_num}", + ) skullstrip_second_pass = pe.Node( - preprocess.Automask(dilate=1, outputtype='NIFTI_GZ'), - name=f'skullstrip_second_pass_{pipe_num}') + preprocess.Automask(dilate=1, outputtype="NIFTI_GZ"), + name=f"skullstrip_second_pass_{pipe_num}", + ) - combine_masks = pe.Node(fsl.BinaryMaths(operation='mul'), - name=f'combine_masks_{pipe_num}') + combine_masks = pe.Node( + fsl.BinaryMaths(operation="mul"), name=f"combine_masks_{pipe_num}" + ) - apply_mask = pe.Node(fsl.ApplyMask(), - name=f'extract_ref_brain_bold_{pipe_num}') + apply_mask = pe.Node(fsl.ApplyMask(), name=f"extract_ref_brain_bold_{pipe_num}") node, out = strat_pool.get_data(["motion-basefile"]) - wf.connect([(node, init_aff, [(out, "moving_image")]), - (node, map_brainmask, [(out, "reference_image")]), - (node, norm, [(out, "moving_image")]), - (init_aff, norm, [ - ("output_transform", "initial_moving_transform")]), - (norm, map_brainmask, [ + wf.connect( + [ + (node, init_aff, [(out, "moving_image")]), + (node, map_brainmask, [(out, "reference_image")]), + (node, norm, [(out, "moving_image")]), + (init_aff, norm, [("output_transform", "initial_moving_transform")]), + ( + norm, + map_brainmask, + [ ("reverse_invert_flags", "invert_transform_flags"), ("reverse_transforms", "transforms"), - ]), - (map_brainmask, binarize_mask, [("output_image", "in_file")]), - (binarize_mask, pre_dilate, [("out_file", "in_file")]), - (pre_dilate, print_header, [("out_file", "image")]), - (print_header, set_direction, [("header", "direction")]), - (node, set_direction, [(out, "infile"), (out, "outfile")]), - (set_direction, n4_correct, [("outfile", "mask_image")]), - (node, n4_correct, [(out, "input_image")]), - (n4_correct, skullstrip_first_pass, - [('output_image', 'in_file')]), - (skullstrip_first_pass, bet_dilate, - [('mask_file', 'in_file')]), - (bet_dilate, bet_mask, [('out_file', 'mask_file')]), - (skullstrip_first_pass, bet_mask, [('out_file', 'in_file')]), - (bet_mask, unifize, [('out_file', 'in_file')]), - (unifize, skullstrip_second_pass, [('out_file', 'in_file')]), - (skullstrip_first_pass, combine_masks, - [('mask_file', 'in_file')]), - (skullstrip_second_pass, combine_masks, - [('out_file', 'operand_file')]), - (unifize, apply_mask, [('out_file', 'in_file')]), - (combine_masks, apply_mask, 
[('out_file', 'mask_file')]), - ]) + ], + ), + (map_brainmask, binarize_mask, [("output_image", "in_file")]), + (binarize_mask, pre_dilate, [("out_file", "in_file")]), + (pre_dilate, print_header, [("out_file", "image")]), + (print_header, set_direction, [("header", "direction")]), + (node, set_direction, [(out, "infile"), (out, "outfile")]), + (set_direction, n4_correct, [("outfile", "mask_image")]), + (node, n4_correct, [(out, "input_image")]), + (n4_correct, skullstrip_first_pass, [("output_image", "in_file")]), + (skullstrip_first_pass, bet_dilate, [("mask_file", "in_file")]), + (bet_dilate, bet_mask, [("out_file", "mask_file")]), + (skullstrip_first_pass, bet_mask, [("out_file", "in_file")]), + (bet_mask, unifize, [("out_file", "in_file")]), + (unifize, skullstrip_second_pass, [("out_file", "in_file")]), + (skullstrip_first_pass, combine_masks, [("mask_file", "in_file")]), + (skullstrip_second_pass, combine_masks, [("out_file", "operand_file")]), + (unifize, apply_mask, [("out_file", "in_file")]), + (combine_masks, apply_mask, [("out_file", "mask_file")]), + ] + ) outputs = { - 'space-bold_desc-brain_mask': (combine_masks, 'out_file'), - 'desc-ref_bold': (apply_mask, 'out_file') + "space-bold_desc-brain_mask": (combine_masks, "out_file"), + "desc-ref_bold": (apply_mask, "out_file"), } return (wf, outputs) @nodeblock( - name='bold_mask_anatomical_refined', - switch=[['functional_preproc', 'run'], - ['functional_preproc', 'func_masking', 'run']], - option_key=['functional_preproc', 'func_masking', 'using'], - option_val='Anatomical_Refined', - inputs=[('bold', 'desc-preproc_bold'), - ('desc-brain_T1w', ['space-T1w_desc-brain_mask', 'space-T1w_desc-acpcbrain_mask'])], - outputs=['space-bold_desc-brain_mask'] + name="bold_mask_anatomical_refined", + switch=[ + ["functional_preproc", "run"], + ["functional_preproc", "func_masking", "run"], + ], + option_key=["functional_preproc", "func_masking", "using"], + option_val="Anatomical_Refined", + inputs=[ + ("bold", "desc-preproc_bold"), + ( + "desc-brain_T1w", + ["space-T1w_desc-brain_mask", "space-T1w_desc-acpcbrain_mask"], + ), + ], + outputs=["space-bold_desc-brain_mask"], ) def bold_mask_anatomical_refined(wf, cfg, strat_pool, pipe_num, opt=None): - # binarize anat mask, in case it is not a binary mask. 
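The two steps that open this block, binarize with fslmaths -bin and then
fill holes with AFNI 3dmask_tool, form a self-contained pattern. A minimal
sketch under plain nipype, with a hypothetical mask path:

    # Sketch of the binarize -> fill-holes pattern used below.
    from nipype.interfaces import afni, fsl
    from nipype.pipeline import engine as pe

    wf = pe.Workflow(name="binarize_fill_sketch")

    # fslmaths <mask> -bin: force a strictly 0/1 mask, in case the input
    # mask is not already binary
    binarize = pe.Node(fsl.ImageMaths(op_string="-bin"), name="binarize")
    binarize.inputs.in_file = "space-T1w_desc-brain_mask.nii.gz"  # hypothetical

    # 3dmask_tool -fill_holes: close interior holes in the binary mask
    fill_holes = pe.Node(
        afni.MaskTool(fill_holes=True, outputtype="NIFTI_GZ"), name="fill_holes"
    )

    wf.connect(binarize, "out_file", fill_holes, "in_file")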
- anat_brain_mask_bin = pe.Node(interface=fsl.ImageMaths(), - name=f'anat_brain_mask_bin_{pipe_num}') - anat_brain_mask_bin.inputs.op_string = '-bin' + anat_brain_mask_bin = pe.Node( + interface=fsl.ImageMaths(), name=f"anat_brain_mask_bin_{pipe_num}" + ) + anat_brain_mask_bin.inputs.op_string = "-bin" - node, out = strat_pool.get_data(['space-T1w_desc-brain_mask', - 'space-T1w_desc-acpcbrain_mask']) - wf.connect(node, out, anat_brain_mask_bin, 'in_file') + node, out = strat_pool.get_data( + ["space-T1w_desc-brain_mask", "space-T1w_desc-acpcbrain_mask"] + ) + wf.connect(node, out, anat_brain_mask_bin, "in_file") # fill holes of anat mask - anat_mask_filled = pe.Node(interface=afni.MaskTool(), - name=f'anat_brain_mask_filled_{pipe_num}') + anat_mask_filled = pe.Node( + interface=afni.MaskTool(), name=f"anat_brain_mask_filled_{pipe_num}" + ) anat_mask_filled.inputs.fill_holes = True - anat_mask_filled.inputs.outputtype = 'NIFTI_GZ' + anat_mask_filled.inputs.outputtype = "NIFTI_GZ" - wf.connect(anat_brain_mask_bin, 'out_file', - anat_mask_filled, 'in_file') + wf.connect(anat_brain_mask_bin, "out_file", anat_mask_filled, "in_file") # init_bold_mask : input raw func - init_bold_mask = anat_refined_mask(init_bold_mask=True, - wf_name=f'init_bold_mask_{pipe_num}') + init_bold_mask = anat_refined_mask( + init_bold_mask=True, wf_name=f"init_bold_mask_{pipe_num}" + ) - func_deoblique = pe.Node(interface=afni_utils.Refit(), - name=f'raw_func_deoblique_{pipe_num}') + func_deoblique = pe.Node( + interface=afni_utils.Refit(), name=f"raw_func_deoblique_{pipe_num}" + ) func_deoblique.inputs.deoblique = True - node, out = strat_pool.get_data('bold') - wf.connect(node, out, func_deoblique, 'in_file') + node, out = strat_pool.get_data("bold") + wf.connect(node, out, func_deoblique, "in_file") - func_reorient = pe.Node(interface=afni_utils.Resample(), - name=f'raw_func_reorient_{pipe_num}', - mem_gb=0, - mem_x=(0.0115, 'in_file', 't')) + func_reorient = pe.Node( + interface=afni_utils.Resample(), + name=f"raw_func_reorient_{pipe_num}", + mem_gb=0, + mem_x=(0.0115, "in_file", "t"), + ) - func_reorient.inputs.orientation = 'RPI' - func_reorient.inputs.outputtype = 'NIFTI_GZ' + func_reorient.inputs.orientation = "RPI" + func_reorient.inputs.outputtype = "NIFTI_GZ" - wf.connect(func_deoblique, 'out_file', - func_reorient, 'in_file') + wf.connect(func_deoblique, "out_file", func_reorient, "in_file") - wf.connect(func_reorient, 'out_file', - init_bold_mask, 'inputspec.func') + wf.connect(func_reorient, "out_file", init_bold_mask, "inputspec.func") - wf.connect(anat_mask_filled, 'out_file', - init_bold_mask, 'inputspec.anatomical_brain_mask') + wf.connect( + anat_mask_filled, "out_file", init_bold_mask, "inputspec.anatomical_brain_mask" + ) - node, out = strat_pool.get_data('desc-brain_T1w') - wf.connect(node, out, init_bold_mask, 'inputspec.anat_brain') + node, out = strat_pool.get_data("desc-brain_T1w") + wf.connect(node, out, init_bold_mask, "inputspec.anat_brain") # dilate init func brain mask - func_tmp_brain_mask = pe.Node(interface=fsl.ImageMaths(), - name=f'func_tmp_brain_mask_dil_{pipe_num}') - func_tmp_brain_mask.inputs.op_string = '-dilM' + func_tmp_brain_mask = pe.Node( + interface=fsl.ImageMaths(), name=f"func_tmp_brain_mask_dil_{pipe_num}" + ) + func_tmp_brain_mask.inputs.op_string = "-dilM" - wf.connect(init_bold_mask, 'outputspec.func_brain_mask', - func_tmp_brain_mask, 'in_file') + wf.connect( + init_bold_mask, "outputspec.func_brain_mask", func_tmp_brain_mask, "in_file" + ) # refined_bold_mask : 
input motion corrected func - refined_bold_mask = anat_refined_mask(init_bold_mask=False, - wf_name='refined_bold_mask' - f'_{pipe_num}') + refined_bold_mask = anat_refined_mask( + init_bold_mask=False, wf_name="refined_bold_mask" f"_{pipe_num}" + ) - node, out = strat_pool.get_data(["desc-preproc_bold", - "bold"]) - wf.connect(node, out, refined_bold_mask, 'inputspec.func') + node, out = strat_pool.get_data(["desc-preproc_bold", "bold"]) + wf.connect(node, out, refined_bold_mask, "inputspec.func") - node, out = strat_pool.get_data('desc-brain_T1w') - wf.connect(node, out, refined_bold_mask, 'inputspec.anat_brain') + node, out = strat_pool.get_data("desc-brain_T1w") + wf.connect(node, out, refined_bold_mask, "inputspec.anat_brain") - wf.connect(func_tmp_brain_mask, 'out_file', - refined_bold_mask, 'inputspec.init_func_brain_mask') + wf.connect( + func_tmp_brain_mask, + "out_file", + refined_bold_mask, + "inputspec.init_func_brain_mask", + ) # dilate anatomical mask - if cfg.functional_preproc['func_masking']['Anatomical_Refined'][ - 'anatomical_mask_dilation']: - anat_mask_dilate = pe.Node(interface=afni.MaskTool(), - name=f'anat_mask_dilate_{pipe_num}') - anat_mask_dilate.inputs.dilate_inputs = '1' - anat_mask_dilate.inputs.outputtype = 'NIFTI_GZ' - - wf.connect(anat_mask_filled, 'out_file', - anat_mask_dilate, 'in_file') - wf.connect(anat_mask_dilate, 'out_file', - refined_bold_mask, 'inputspec.anatomical_brain_mask') + if cfg.functional_preproc["func_masking"]["Anatomical_Refined"][ + "anatomical_mask_dilation" + ]: + anat_mask_dilate = pe.Node( + interface=afni.MaskTool(), name=f"anat_mask_dilate_{pipe_num}" + ) + anat_mask_dilate.inputs.dilate_inputs = "1" + anat_mask_dilate.inputs.outputtype = "NIFTI_GZ" + + wf.connect(anat_mask_filled, "out_file", anat_mask_dilate, "in_file") + wf.connect( + anat_mask_dilate, + "out_file", + refined_bold_mask, + "inputspec.anatomical_brain_mask", + ) else: - wf.connect(anat_mask_filled, 'out_file', - refined_bold_mask, 'inputspec.anatomical_brain_mask') + wf.connect( + anat_mask_filled, + "out_file", + refined_bold_mask, + "inputspec.anatomical_brain_mask", + ) # get final func mask - func_mask_final = pe.Node(interface=fsl.MultiImageMaths(), - name=f'func_mask_final_{pipe_num}') + func_mask_final = pe.Node( + interface=fsl.MultiImageMaths(), name=f"func_mask_final_{pipe_num}" + ) func_mask_final.inputs.op_string = "-mul %s" - wf.connect(func_tmp_brain_mask, 'out_file', - func_mask_final, 'in_file') + wf.connect(func_tmp_brain_mask, "out_file", func_mask_final, "in_file") - wf.connect(refined_bold_mask, 'outputspec.func_brain_mask', - func_mask_final, 'operand_files') + wf.connect( + refined_bold_mask, + "outputspec.func_brain_mask", + func_mask_final, + "operand_files", + ) - outputs = { - 'space-bold_desc-brain_mask': (func_mask_final, 'out_file') - } + outputs = {"space-bold_desc-brain_mask": (func_mask_final, "out_file")} return (wf, outputs) @nodeblock( - name='bold_mask_anatomical_based', - switch=[['functional_preproc', 'run'], - ['functional_preproc', 'func_masking', 'run']], - option_key=['functional_preproc', 'func_masking', 'using'], - option_val='Anatomical_Based', - inputs=['desc-preproc_bold', ('desc-brain_T1w', ['desc-preproc_T1w', 'desc-reorient_T1w', 'T1w'])], - outputs=['space-bold_desc-brain_mask'] + name="bold_mask_anatomical_based", + switch=[ + ["functional_preproc", "run"], + ["functional_preproc", "func_masking", "run"], + ], + option_key=["functional_preproc", "func_masking", "using"], + option_val="Anatomical_Based", + 
inputs=[ + "desc-preproc_bold", + ("desc-brain_T1w", ["desc-preproc_T1w", "desc-reorient_T1w", "T1w"]), + ], + outputs=["space-bold_desc-brain_mask"], ) def bold_mask_anatomical_based(wf, cfg, strat_pool, pipe_num, opt=None): - '''Generate the BOLD mask by basing it off of the anatomical brain mask. + """Generate the BOLD mask by basing it off of the anatomical brain mask. Adapted from `DCAN Lab's BOLD mask method from the ABCD pipeline `_. - ''' - + """ # 0. Take single volume of func - func_single_volume = pe.Node(interface=afni.Calc(), - name='func_single_volume') + func_single_volume = pe.Node(interface=afni.Calc(), name="func_single_volume") - func_single_volume.inputs.set( - expr='a', - single_idx=1, - outputtype='NIFTI_GZ' - ) + func_single_volume.inputs.set(expr="a", single_idx=1, outputtype="NIFTI_GZ") node, out = strat_pool.get_data("desc-preproc_bold") - wf.connect(node, out, func_single_volume, 'in_file_a') + wf.connect(node, out, func_single_volume, "in_file_a") # 1. Register func head to anat head to get func2anat matrix - linear_reg_func_to_anat = pe.Node(interface=fsl.FLIRT(), - name='func_to_anat_linear_reg') + linear_reg_func_to_anat = pe.Node( + interface=fsl.FLIRT(), name="func_to_anat_linear_reg" + ) linear_reg_func_to_anat.inputs.dof = 6 - linear_reg_func_to_anat.inputs.interp = 'spline' + linear_reg_func_to_anat.inputs.interp = "spline" linear_reg_func_to_anat.inputs.searchr_x = [30, 30] linear_reg_func_to_anat.inputs.searchr_y = [30, 30] linear_reg_func_to_anat.inputs.searchr_z = [30, 30] - wf.connect(func_single_volume, 'out_file', - linear_reg_func_to_anat, 'in_file') + wf.connect(func_single_volume, "out_file", linear_reg_func_to_anat, "in_file") - node, out = strat_pool.get_data(["desc-preproc_T1w", "desc-reorient_T1w", - "T1w"]) - wf.connect(node, out, linear_reg_func_to_anat, 'reference') + node, out = strat_pool.get_data(["desc-preproc_T1w", "desc-reorient_T1w", "T1w"]) + wf.connect(node, out, linear_reg_func_to_anat, "reference") # 2. Inverse func to anat affine, to get anat-to-func transform - inv_func_to_anat_affine = pe.Node(interface=fsl.ConvertXFM(), - name='inv_func2anat_affine') + inv_func_to_anat_affine = pe.Node( + interface=fsl.ConvertXFM(), name="inv_func2anat_affine" + ) inv_func_to_anat_affine.inputs.invert_xfm = True - wf.connect(linear_reg_func_to_anat, 'out_matrix_file', - inv_func_to_anat_affine, 'in_file') + wf.connect( + linear_reg_func_to_anat, "out_matrix_file", inv_func_to_anat_affine, "in_file" + ) # 3. 
get BOLD mask # 3.1 Apply anat-to-func transform to transfer anatomical brain to functional space - reg_anat_brain_to_func = pe.Node(interface=fsl.ApplyWarp(), - name='reg_anat_brain_to_func') - reg_anat_brain_to_func.inputs.interp = 'nn' + reg_anat_brain_to_func = pe.Node( + interface=fsl.ApplyWarp(), name="reg_anat_brain_to_func" + ) + reg_anat_brain_to_func.inputs.interp = "nn" reg_anat_brain_to_func.inputs.relwarp = True node, out = strat_pool.get_data("desc-brain_T1w") - wf.connect(node, out, reg_anat_brain_to_func, 'in_file') + wf.connect(node, out, reg_anat_brain_to_func, "in_file") node, out = strat_pool.get_data("desc-preproc_bold") - wf.connect(node, out, reg_anat_brain_to_func, 'ref_file') + wf.connect(node, out, reg_anat_brain_to_func, "ref_file") - wf.connect(inv_func_to_anat_affine, 'out_file', - reg_anat_brain_to_func, 'premat') + wf.connect(inv_func_to_anat_affine, "out_file", reg_anat_brain_to_func, "premat") # 3.2 Binarize transfered image - func_mask_bin = pe.Node(interface=fsl.ImageMaths(), - name='func_mask_bin') - func_mask_bin.inputs.op_string = '-abs -bin' + func_mask_bin = pe.Node(interface=fsl.ImageMaths(), name="func_mask_bin") + func_mask_bin.inputs.op_string = "-abs -bin" - wf.connect(reg_anat_brain_to_func, 'out_file', - func_mask_bin, 'in_file') + wf.connect(reg_anat_brain_to_func, "out_file", func_mask_bin, "in_file") # 3.3 Fill holes to get BOLD mask - func_mask_fill_holes = pe.Node(interface=afni.MaskTool(), - name='func_mask_fill_holes') + func_mask_fill_holes = pe.Node( + interface=afni.MaskTool(), name="func_mask_fill_holes" + ) func_mask_fill_holes.inputs.fill_holes = True - func_mask_fill_holes.inputs.outputtype = 'NIFTI_GZ' + func_mask_fill_holes.inputs.outputtype = "NIFTI_GZ" - wf.connect(func_mask_bin, 'out_file', - func_mask_fill_holes, 'in_file') + wf.connect(func_mask_bin, "out_file", func_mask_fill_holes, "in_file") - outputs = { - 'space-bold_desc-brain_mask': (func_mask_fill_holes, 'out_file') - } + outputs = {"space-bold_desc-brain_mask": (func_mask_fill_holes, "out_file")} return (wf, outputs) + def anat_brain_to_bold_res(wf_name, cfg, pipe_num): - wf = pe.Workflow(name=f"{wf_name}_{pipe_num}") - - inputNode = pe.Node(util.IdentityInterface(fields=['T1w-template-funcreg', - 'space-template_desc-preproc_T1w']), - name='inputspec') - outputNode = pe.Node(util.IdentityInterface(fields=['space-template_res-bold_desc-brain_T1w']), - name='outputspec') + + inputNode = pe.Node( + util.IdentityInterface( + fields=["T1w-template-funcreg", "space-template_desc-preproc_T1w"] + ), + name="inputspec", + ) + outputNode = pe.Node( + util.IdentityInterface(fields=["space-template_res-bold_desc-brain_T1w"]), + name="outputspec", + ) # applywarp --rel --interp=spline -i ${T1wImage} -r ${ResampRefIm} --premat=$FSLDIR/etc/flirtsch/ident.mat -o ${WD}/${T1wImageFile}.${FinalfMRIResolution} - anat_brain_to_func_res = pe.Node(interface=fsl.ApplyWarp(), - name=f'resample_anat_brain_in_standard_{pipe_num}') + anat_brain_to_func_res = pe.Node( + interface=fsl.ApplyWarp(), name=f"resample_anat_brain_in_standard_{pipe_num}" + ) - anat_brain_to_func_res.inputs.interp = 'spline' + anat_brain_to_func_res.inputs.interp = "spline" anat_brain_to_func_res.inputs.premat = cfg.registration_workflows[ - 'anatomical_registration']['registration']['FSL-FNIRT']['identity_matrix'] + "anatomical_registration" + ]["registration"]["FSL-FNIRT"]["identity_matrix"] - wf.connect(inputNode, 'space-template_desc-preproc_T1w', anat_brain_to_func_res, 'in_file') - wf.connect(inputNode, 
'T1w-template-funcreg', anat_brain_to_func_res, 'ref_file') + wf.connect( + inputNode, "space-template_desc-preproc_T1w", anat_brain_to_func_res, "in_file" + ) + wf.connect(inputNode, "T1w-template-funcreg", anat_brain_to_func_res, "ref_file") - wf.connect(anat_brain_to_func_res, 'out_file', outputNode, 'space-template_res-bold_desc-brain_T1w') + wf.connect( + anat_brain_to_func_res, + "out_file", + outputNode, + "space-template_res-bold_desc-brain_T1w", + ) return wf + def anat_brain_mask_to_bold_res(wf_name, cfg, pipe_num): # Create brain masks in this space from the FreeSurfer output (changing resolution) # applywarp --rel --interp=nn -i ${FreeSurferBrainMask}.nii.gz -r ${WD}/${T1wImageFile}.${FinalfMRIResolution} --premat=$FSLDIR/etc/flirtsch/ident.mat -o ${WD}/${FreeSurferBrainMaskFile}.${FinalfMRIResolution}.nii.gz wf = pe.Workflow(name=f"{wf_name}_{pipe_num}") - inputNode = pe.Node(util.IdentityInterface(fields=['space-template_desc-T1w_mask', - 'space-template_desc-preproc_T1w']), - name='inputspec') - outputNode = pe.Node(util.IdentityInterface(fields=['space-template_desc-bold_mask']), - name='outputspec') - - anat_brain_mask_to_func_res = pe.Node(interface=fsl.ApplyWarp(), - name=f'resample_anat_brain_mask_in_standard_{pipe_num}') - - anat_brain_mask_to_func_res.inputs.interp = 'nn' - anat_brain_mask_to_func_res.inputs.premat = cfg.registration_workflows[ - 'anatomical_registration']['registration']['FSL-FNIRT']['identity_matrix'] + inputNode = pe.Node( + util.IdentityInterface( + fields=["space-template_desc-T1w_mask", "space-template_desc-preproc_T1w"] + ), + name="inputspec", + ) + outputNode = pe.Node( + util.IdentityInterface(fields=["space-template_desc-bold_mask"]), + name="outputspec", + ) - wf.connect(inputNode, 'space-template_desc-T1w_mask', anat_brain_mask_to_func_res, 'in_file') - wf.connect(inputNode, 'space-template_desc-preproc_T1w', anat_brain_mask_to_func_res, 'ref_file') - wf.connect(anat_brain_mask_to_func_res, 'out_file', outputNode, 'space-template_desc-bold_mask') + anat_brain_mask_to_func_res = pe.Node( + interface=fsl.ApplyWarp(), + name=f"resample_anat_brain_mask_in_standard_{pipe_num}", + ) + + anat_brain_mask_to_func_res.inputs.interp = "nn" + anat_brain_mask_to_func_res.inputs.premat = cfg.registration_workflows[ + "anatomical_registration" + ]["registration"]["FSL-FNIRT"]["identity_matrix"] + + wf.connect( + inputNode, + "space-template_desc-T1w_mask", + anat_brain_mask_to_func_res, + "in_file", + ) + wf.connect( + inputNode, + "space-template_desc-preproc_T1w", + anat_brain_mask_to_func_res, + "ref_file", + ) + wf.connect( + anat_brain_mask_to_func_res, + "out_file", + outputNode, + "space-template_desc-bold_mask", + ) return wf + @nodeblock( - name='bold_mask_anatomical_resampled', - switch=[['functional_preproc', 'run'], - ['functional_preproc', 'func_masking', 'run']], - option_key=['functional_preproc', 'func_masking', 'using'], - option_val='Anatomical_Resampled', - inputs=['desc-preproc_bold', 'T1w-template-funcreg', 'space-template_desc-preproc_T1w', - 'space-template_desc-brain_mask'], - outputs=['space-template_res-bold_desc-brain_T1w', 'space-template_desc-bold_mask', 'space-bold_desc-brain_mask'] + name="bold_mask_anatomical_resampled", + switch=[ + ["functional_preproc", "run"], + ["functional_preproc", "func_masking", "run"], + ], + option_key=["functional_preproc", "func_masking", "using"], + option_val="Anatomical_Resampled", + inputs=[ + "desc-preproc_bold", + "T1w-template-funcreg", + "space-template_desc-preproc_T1w", + 
"space-template_desc-brain_mask", + ], + outputs=[ + "space-template_res-bold_desc-brain_T1w", + "space-template_desc-bold_mask", + "space-bold_desc-brain_mask", + ], ) def bold_mask_anatomical_resampled(wf, cfg, strat_pool, pipe_num, opt=None): - '''Resample anatomical brain mask in standard space to get BOLD brain mask in standard space + """Resample anatomical brain mask in standard space to get BOLD brain mask in standard space Adapted from `DCAN Lab's BOLD mask method from the ABCD pipeline `_. - ''' - + """ anat_brain_to_func_res = anat_brain_to_bold_res(wf, cfg, pipe_num) - node, out = strat_pool.get_data('space-template_desc-preproc_T1w') - wf.connect(node, out, anat_brain_to_func_res, 'inputspec.space-template_desc-preproc_T1w') + node, out = strat_pool.get_data("space-template_desc-preproc_T1w") + wf.connect( + node, out, anat_brain_to_func_res, "inputspec.space-template_desc-preproc_T1w" + ) - node, out = strat_pool.get_data('T1w-template-funcreg') - wf.connect(node, out, anat_brain_to_func_res, 'inputspec.T1w-template-funcreg') + node, out = strat_pool.get_data("T1w-template-funcreg") + wf.connect(node, out, anat_brain_to_func_res, "inputspec.T1w-template-funcreg") # Create brain masks in this space from the FreeSurfer output (changing resolution) # applywarp --rel --interp=nn -i ${FreeSurferBrainMask}.nii.gz -r ${WD}/${T1wImageFile}.${FinalfMRIResolution} --premat=$FSLDIR/etc/flirtsch/ident.mat -o ${WD}/${FreeSurferBrainMaskFile}.${FinalfMRIResolution}.nii.gz - anat_brain_mask_to_func_res = anat_brain_mask_to_bold_res(wf_name='anat_brain_mask_to_bold_res', cfg=cfg, pipe_num=pipe_num) - - node, out = strat_pool.get_data('space-template_desc-brain_mask') - wf.connect(node, out, anat_brain_mask_to_func_res, 'inputspec.space-template_desc-T1w_mask') + anat_brain_mask_to_func_res = anat_brain_mask_to_bold_res( + wf_name="anat_brain_mask_to_bold_res", cfg=cfg, pipe_num=pipe_num + ) - wf.connect(anat_brain_to_func_res, 'outputspec.space-template_res-bold_desc-brain_T1w', - anat_brain_mask_to_func_res, 'inputspec.space-template_desc-preproc_T1w') + node, out = strat_pool.get_data("space-template_desc-brain_mask") + wf.connect( + node, out, anat_brain_mask_to_func_res, "inputspec.space-template_desc-T1w_mask" + ) + + wf.connect( + anat_brain_to_func_res, + "outputspec.space-template_res-bold_desc-brain_T1w", + anat_brain_mask_to_func_res, + "inputspec.space-template_desc-preproc_T1w", + ) # Resample func mask in template space back to native space func_mask_template_to_native = pe.Node( interface=afni.Resample(), - name=f'resample_func_mask_to_native_{pipe_num}', + name=f"resample_func_mask_to_native_{pipe_num}", mem_gb=0, - mem_x=(0.0115, 'in_file', 't')) - func_mask_template_to_native.inputs.resample_mode = 'NN' - func_mask_template_to_native.inputs.outputtype = 'NIFTI_GZ' - - wf.connect(anat_brain_mask_to_func_res, 'outputspec.space-template_desc-bold_mask', - func_mask_template_to_native, 'in_file') + mem_x=(0.0115, "in_file", "t"), + ) + func_mask_template_to_native.inputs.resample_mode = "NN" + func_mask_template_to_native.inputs.outputtype = "NIFTI_GZ" + + wf.connect( + anat_brain_mask_to_func_res, + "outputspec.space-template_desc-bold_mask", + func_mask_template_to_native, + "in_file", + ) node, out = strat_pool.get_data("desc-preproc_bold") - wf.connect(node, out, func_mask_template_to_native, 'master') + wf.connect(node, out, func_mask_template_to_native, "master") outputs = { - 'space-template_res-bold_desc-brain_T1w': (anat_brain_to_func_res, 
'outputspec.space-template_res-bold_desc-brain_T1w'), - 'space-template_desc-bold_mask': (anat_brain_mask_to_func_res, 'outputspec.space-template_desc-bold_mask'), - 'space-bold_desc-brain_mask': (func_mask_template_to_native, 'out_file') + "space-template_res-bold_desc-brain_T1w": ( + anat_brain_to_func_res, + "outputspec.space-template_res-bold_desc-brain_T1w", + ), + "space-template_desc-bold_mask": ( + anat_brain_mask_to_func_res, + "outputspec.space-template_desc-bold_mask", + ), + "space-bold_desc-brain_mask": (func_mask_template_to_native, "out_file"), } return (wf, outputs) + @nodeblock( - name='bold_mask_ccs', - switch=[['functional_preproc', 'run'], - ['functional_preproc', 'func_masking', 'run']], - option_key=['functional_preproc', 'func_masking', 'using'], - option_val='CCS_Anatomical_Refined', - inputs=[['desc-motion_bold', 'desc-preproc_bold', 'bold'], 'desc-brain_T1w', - ['desc-preproc_T1w', 'desc-reorient_T1w', 'T1w']], - outputs=['space-bold_desc-brain_mask', 'desc-ref_bold'] + name="bold_mask_ccs", + switch=[ + ["functional_preproc", "run"], + ["functional_preproc", "func_masking", "run"], + ], + option_key=["functional_preproc", "func_masking", "using"], + option_val="CCS_Anatomical_Refined", + inputs=[ + ["desc-motion_bold", "desc-preproc_bold", "bold"], + "desc-brain_T1w", + ["desc-preproc_T1w", "desc-reorient_T1w", "T1w"], + ], + outputs=["space-bold_desc-brain_mask", "desc-ref_bold"], ) def bold_mask_ccs(wf, cfg, strat_pool, pipe_num, opt=None): - '''Generate the BOLD mask by basing it off of the anatomical brain. + """Generate the BOLD mask by basing it off of the anatomical brain. Adapted from `the BOLD mask method from the CCS pipeline `_. - ''' - + """ # Run 3dAutomask to generate func initial mask - func_tmp_brain_mask = pe.Node(interface=preprocess.Automask(), - name=f'func_tmp_brain_mask_AFNI_{pipe_num}') + func_tmp_brain_mask = pe.Node( + interface=preprocess.Automask(), name=f"func_tmp_brain_mask_AFNI_{pipe_num}" + ) func_tmp_brain_mask.inputs.dilate = 1 - func_tmp_brain_mask.inputs.outputtype = 'NIFTI_GZ' + func_tmp_brain_mask.inputs.outputtype = "NIFTI_GZ" - node, out = strat_pool.get_data(["desc-motion_bold", - "desc-preproc_bold", - "bold"]) - wf.connect(node, out, func_tmp_brain_mask, 'in_file') + node, out = strat_pool.get_data(["desc-motion_bold", "desc-preproc_bold", "bold"]) + wf.connect(node, out, func_tmp_brain_mask, "in_file") # Extract 8th volume as func ROI - func_roi = pe.Node(interface=fsl.ExtractROI(), - name=f'extract_func_roi_{pipe_num}') + func_roi = pe.Node(interface=fsl.ExtractROI(), name=f"extract_func_roi_{pipe_num}") func_roi.inputs.t_min = 7 func_roi.inputs.t_size = 1 - node, out = strat_pool.get_data(["desc-motion_bold", - "desc-preproc_bold", - "bold"]) - wf.connect(node, out, func_roi, 'in_file') + node, out = strat_pool.get_data(["desc-motion_bold", "desc-preproc_bold", "bold"]) + wf.connect(node, out, func_roi, "in_file") # Apply func initial mask on func ROI volume - func_tmp_brain = pe.Node(interface=fsl.maths.ApplyMask(), - name=f'get_func_tmp_brain_{pipe_num}') + func_tmp_brain = pe.Node( + interface=fsl.maths.ApplyMask(), name=f"get_func_tmp_brain_{pipe_num}" + ) - wf.connect(func_roi, 'roi_file', - func_tmp_brain, 'in_file') + wf.connect(func_roi, "roi_file", func_tmp_brain, "in_file") - wf.connect(func_tmp_brain_mask, 'out_file', - func_tmp_brain, 'mask_file') + wf.connect(func_tmp_brain_mask, "out_file", func_tmp_brain, "mask_file") # Register func tmp brain to anat brain to get func2anat matrix - reg_func_to_anat = 
pe.Node(interface=fsl.FLIRT(), - name=f'func_to_anat_linear_reg_{pipe_num}') - reg_func_to_anat.inputs.interp = 'trilinear' - reg_func_to_anat.inputs.cost = 'corratio' + reg_func_to_anat = pe.Node( + interface=fsl.FLIRT(), name=f"func_to_anat_linear_reg_{pipe_num}" + ) + reg_func_to_anat.inputs.interp = "trilinear" + reg_func_to_anat.inputs.cost = "corratio" reg_func_to_anat.inputs.dof = 6 - wf.connect(func_tmp_brain, 'out_file', - reg_func_to_anat, 'in_file') + wf.connect(func_tmp_brain, "out_file", reg_func_to_anat, "in_file") node, out = strat_pool.get_data("desc-brain_T1w") - wf.connect(node, out, reg_func_to_anat, 'reference') + wf.connect(node, out, reg_func_to_anat, "reference") # Inverse func2anat matrix - inv_func_to_anat_affine = pe.Node(interface=fsl.ConvertXFM(), - name=f'inv_func2anat_affine_{pipe_num}') + inv_func_to_anat_affine = pe.Node( + interface=fsl.ConvertXFM(), name=f"inv_func2anat_affine_{pipe_num}" + ) inv_func_to_anat_affine.inputs.invert_xfm = True - wf.connect(reg_func_to_anat, 'out_matrix_file', - inv_func_to_anat_affine, 'in_file') + wf.connect(reg_func_to_anat, "out_matrix_file", inv_func_to_anat_affine, "in_file") # Transform anat brain to func space - reg_anat_brain_to_func = pe.Node(interface=fsl.FLIRT(), - name=f'reg_anat_brain_to_func_{pipe_num}') + reg_anat_brain_to_func = pe.Node( + interface=fsl.FLIRT(), name=f"reg_anat_brain_to_func_{pipe_num}" + ) reg_anat_brain_to_func.inputs.apply_xfm = True - reg_anat_brain_to_func.inputs.interp = 'trilinear' + reg_anat_brain_to_func.inputs.interp = "trilinear" node, out = strat_pool.get_data("desc-brain_T1w") - wf.connect(node, out, reg_anat_brain_to_func, 'in_file') + wf.connect(node, out, reg_anat_brain_to_func, "in_file") - wf.connect(func_roi, 'roi_file', - reg_anat_brain_to_func, 'reference') + wf.connect(func_roi, "roi_file", reg_anat_brain_to_func, "reference") - wf.connect(inv_func_to_anat_affine, 'out_file', - reg_anat_brain_to_func, 'in_matrix_file') + wf.connect( + inv_func_to_anat_affine, "out_file", reg_anat_brain_to_func, "in_matrix_file" + ) # Binarize and dilate anat brain in func space - bin_anat_brain_in_func = pe.Node(interface=fsl.ImageMaths(), - name=f'bin_anat_brain_in_func_{pipe_num}') - bin_anat_brain_in_func.inputs.op_string = '-bin -dilM' + bin_anat_brain_in_func = pe.Node( + interface=fsl.ImageMaths(), name=f"bin_anat_brain_in_func_{pipe_num}" + ) + bin_anat_brain_in_func.inputs.op_string = "-bin -dilM" - wf.connect(reg_anat_brain_to_func, 'out_file', - bin_anat_brain_in_func, 'in_file') + wf.connect(reg_anat_brain_to_func, "out_file", bin_anat_brain_in_func, "in_file") # Binarize detectable func signals - bin_func = pe.Node(interface=fsl.ImageMaths(), - name=f'bin_func_{pipe_num}') - bin_func.inputs.op_string = '-Tstd -bin' + bin_func = pe.Node(interface=fsl.ImageMaths(), name=f"bin_func_{pipe_num}") + bin_func.inputs.op_string = "-Tstd -bin" - node, out = strat_pool.get_data(["desc-motion_bold", - "desc-preproc_bold", - "bold"]) - wf.connect(node, out, bin_func, 'in_file') + node, out = strat_pool.get_data(["desc-motion_bold", "desc-preproc_bold", "bold"]) + wf.connect(node, out, bin_func, "in_file") # Take intersection of masks - merge_func_mask = pe.Node(util.Merge(2), - name=f'merge_func_mask_{pipe_num}') + merge_func_mask = pe.Node(util.Merge(2), name=f"merge_func_mask_{pipe_num}") - wf.connect(func_tmp_brain_mask, 'out_file', - merge_func_mask, 'in1') + wf.connect(func_tmp_brain_mask, "out_file", merge_func_mask, "in1") - wf.connect(bin_anat_brain_in_func, 'out_file', - 
merge_func_mask, 'in2') + wf.connect(bin_anat_brain_in_func, "out_file", merge_func_mask, "in2") - intersect_mask = pe.Node(interface=fsl.MultiImageMaths(), - name=f'intersect_mask_{pipe_num}') - intersect_mask.inputs.op_string = '-mul %s -mul %s' - intersect_mask.inputs.output_datatype = 'char' + intersect_mask = pe.Node( + interface=fsl.MultiImageMaths(), name=f"intersect_mask_{pipe_num}" + ) + intersect_mask.inputs.op_string = "-mul %s -mul %s" + intersect_mask.inputs.output_datatype = "char" - wf.connect(bin_func, 'out_file', - intersect_mask, 'in_file') + wf.connect(bin_func, "out_file", intersect_mask, "in_file") - wf.connect(merge_func_mask, 'out', - intersect_mask, 'operand_files') + wf.connect(merge_func_mask, "out", intersect_mask, "operand_files") # this is the func input for coreg in ccs # TODO evaluate if it's necessary to use this brain - example_func_brain = pe.Node(interface=fsl.maths.ApplyMask(), - name=f'get_example_func_brain_{pipe_num}') + example_func_brain = pe.Node( + interface=fsl.maths.ApplyMask(), name=f"get_example_func_brain_{pipe_num}" + ) - wf.connect(func_roi, 'roi_file', - example_func_brain, 'in_file') + wf.connect(func_roi, "roi_file", example_func_brain, "in_file") - wf.connect(intersect_mask, 'out_file', - example_func_brain, 'mask_file') + wf.connect(intersect_mask, "out_file", example_func_brain, "mask_file") outputs = { - 'space-bold_desc-brain_mask': (intersect_mask, 'out_file'), - 'desc-ref_bold': (example_func_brain, 'out_file') + "space-bold_desc-brain_mask": (intersect_mask, "out_file"), + "desc-ref_bold": (example_func_brain, "out_file"), } return (wf, outputs) @nodeblock( - name='bold_masking', - switch=[['functional_preproc', 'run'], - ['functional_preproc', 'func_masking', 'run'], - ['functional_preproc', 'func_masking', 'apply_func_mask_in_native_space']], - inputs=[('desc-preproc_bold', 'space-bold_desc-brain_mask')], - outputs={'desc-preproc_bold': {'Description': 'The skull-stripped BOLD time-series.', 'SkullStripped': True}, - 'desc-brain_bold': {'Description': 'The skull-stripped BOLD time-series.', 'SkullStripped': True}} + name="bold_masking", + switch=[ + ["functional_preproc", "run"], + ["functional_preproc", "func_masking", "run"], + ["functional_preproc", "func_masking", "apply_func_mask_in_native_space"], + ], + inputs=[("desc-preproc_bold", "space-bold_desc-brain_mask")], + outputs={ + "desc-preproc_bold": { + "Description": "The skull-stripped BOLD time-series.", + "SkullStripped": True, + }, + "desc-brain_bold": { + "Description": "The skull-stripped BOLD time-series.", + "SkullStripped": True, + }, + }, ) def bold_masking(wf, cfg, strat_pool, pipe_num, opt=None): - func_edge_detect = pe.Node(interface=afni_utils.Calc(), - name=f'func_extract_brain_{pipe_num}') + func_edge_detect = pe.Node( + interface=afni_utils.Calc(), name=f"func_extract_brain_{pipe_num}" + ) - func_edge_detect.inputs.expr = 'a*b' - func_edge_detect.inputs.outputtype = 'NIFTI_GZ' + func_edge_detect.inputs.expr = "a*b" + func_edge_detect.inputs.outputtype = "NIFTI_GZ" node, out = strat_pool.get_data("desc-preproc_bold") - wf.connect(node, out, func_edge_detect, 'in_file_a') + wf.connect(node, out, func_edge_detect, "in_file_a") node, out = strat_pool.get_data("space-bold_desc-brain_mask") - wf.connect(node, out, func_edge_detect, 'in_file_b') + wf.connect(node, out, func_edge_detect, "in_file_b") outputs = { - 'desc-preproc_bold': (func_edge_detect, 'out_file'), - 'desc-brain_bold': (func_edge_detect, 'out_file') + "desc-preproc_bold": (func_edge_detect, 
"out_file"), + "desc-brain_bold": (func_edge_detect, "out_file"), } return (wf, outputs) @nodeblock( - name='func_mean', - switch=[['functional_preproc', 'run'], ['functional_preproc', 'generate_func_mean', 'run']], - inputs=['desc-preproc_bold'], - outputs=['desc-mean_bold'] + name="func_mean", + switch=[ + ["functional_preproc", "run"], + ["functional_preproc", "generate_func_mean", "run"], + ], + inputs=["desc-preproc_bold"], + outputs=["desc-mean_bold"], ) def func_mean(wf, cfg, strat_pool, pipe_num, opt=None): + func_mean = pe.Node(interface=afni_utils.TStat(), name=f"func_mean_{pipe_num}") - func_mean = pe.Node(interface=afni_utils.TStat(), - name=f'func_mean_{pipe_num}') - - func_mean.inputs.options = '-mean' - func_mean.inputs.outputtype = 'NIFTI_GZ' + func_mean.inputs.options = "-mean" + func_mean.inputs.outputtype = "NIFTI_GZ" node, out = strat_pool.get_data("desc-preproc_bold") - wf.connect(node, out, func_mean, 'in_file') + wf.connect(node, out, func_mean, "in_file") - outputs = { - 'desc-mean_bold': (func_mean, 'out_file') - } + outputs = {"desc-mean_bold": (func_mean, "out_file")} return (wf, outputs) @nodeblock( - name='func_normalize', - switch=[['functional_preproc', 'run'], ['functional_preproc', 'normalize_func', 'run']], - inputs=['desc-preproc_bold'], - outputs=['desc-preproc_bold'] + name="func_normalize", + switch=[ + ["functional_preproc", "run"], + ["functional_preproc", "normalize_func", "run"], + ], + inputs=["desc-preproc_bold"], + outputs=["desc-preproc_bold"], ) def func_normalize(wf, cfg, strat_pool, pipe_num, opt=None): - func_normalize = pe.Node(interface=fsl.ImageMaths(), - name=f'func_normalize_{pipe_num}', - mem_gb=0.7, - mem_x=(4538494663498653 / - 604462909807314587353088, 'in_file')) - func_normalize.inputs.op_string = '-ing 10000' - func_normalize.inputs.out_data_type = 'float' + func_normalize = pe.Node( + interface=fsl.ImageMaths(), + name=f"func_normalize_{pipe_num}", + mem_gb=0.7, + mem_x=(4538494663498653 / 604462909807314587353088, "in_file"), + ) + func_normalize.inputs.op_string = "-ing 10000" + func_normalize.inputs.out_data_type = "float" node, out = strat_pool.get_data("desc-preproc_bold") - wf.connect(node, out, func_normalize, 'in_file') + wf.connect(node, out, func_normalize, "in_file") - outputs = { - 'desc-preproc_bold': (func_normalize, 'out_file') - } + outputs = {"desc-preproc_bold": (func_normalize, "out_file")} return (wf, outputs) @nodeblock( - name='func_mask_normalize', - config=['functional_preproc'], - switch=['run'], - inputs=[('desc-preproc_bold', 'space-bold_desc-brain_mask')], - outputs=['space-bold_desc-brain_mask'] + name="func_mask_normalize", + config=["functional_preproc"], + switch=["run"], + inputs=[("desc-preproc_bold", "space-bold_desc-brain_mask")], + outputs=["space-bold_desc-brain_mask"], ) def func_mask_normalize(wf, cfg, strat_pool, pipe_num, opt=None): - - func_mask_normalize = pe.Node(interface=fsl.ImageMaths(), - name=f'func_mask_normalize_{pipe_num}', - mem_gb=0.7, - mem_x=(4538494663498653 / - 604462909807314587353088, 'in_file')) - func_mask_normalize.inputs.op_string = '-Tmin -bin' - func_mask_normalize.inputs.out_data_type = 'char' + func_mask_normalize = pe.Node( + interface=fsl.ImageMaths(), + name=f"func_mask_normalize_{pipe_num}", + mem_gb=0.7, + mem_x=(4538494663498653 / 604462909807314587353088, "in_file"), + ) + func_mask_normalize.inputs.op_string = "-Tmin -bin" + func_mask_normalize.inputs.out_data_type = "char" node, out = strat_pool.get_data("desc-preproc_bold") - wf.connect(node, out, 
func_mask_normalize, 'in_file') + wf.connect(node, out, func_mask_normalize, "in_file") - outputs = { - 'space-bold_desc-brain_mask': (func_mask_normalize, 'out_file') - } + outputs = {"space-bold_desc-brain_mask": (func_mask_normalize, "out_file")} return (wf, outputs) diff --git a/CPAC/longitudinal_pipeline/longitudinal_workflow.py b/CPAC/longitudinal_pipeline/longitudinal_workflow.py index fb12d49ab7..9b2c389a09 100644 --- a/CPAC/longitudinal_pipeline/longitudinal_workflow.py +++ b/CPAC/longitudinal_pipeline/longitudinal_workflow.py @@ -16,60 +16,38 @@ # You should have received a copy of the GNU Lesser General Public # License along with C-PAC. If not, see . import os -import copy -import time -import shutil -from CPAC.pipeline.nodeblock import nodeblock -from nipype import config from nipype import logging -from CPAC.pipeline import nipype_pipeline_engine as pe -import nipype.interfaces.afni as afni -import nipype.interfaces.fsl as fsl +from nipype.interfaces import fsl import nipype.interfaces.io as nio -from nipype.interfaces.utility import Merge, IdentityInterface -import nipype.interfaces.utility as util - from indi_aws import aws_utils -from CPAC.utils.utils import concat_list -from CPAC.utils.interfaces.datasink import DataSink -from CPAC.utils.interfaces.function import Function - -import CPAC - -from CPAC.pipeline.cpac_pipeline import initialize_nipype_wf, \ - connect_pipeline, build_anat_preproc_stack, build_T1w_registration_stack,\ - build_segmentation_stack -from CPAC.pipeline.engine import initiate_rpool, ingress_output_dir - +from CPAC.longitudinal_pipeline.longitudinal_preproc import subject_specific_template +from CPAC.pipeline import nipype_pipeline_engine as pe +from CPAC.pipeline.cpac_pipeline import ( + build_anat_preproc_stack, + build_segmentation_stack, + build_T1w_registration_stack, + connect_pipeline, + initialize_nipype_wf, +) +from CPAC.pipeline.engine import ingress_output_dir, initiate_rpool +from CPAC.pipeline.nodeblock import nodeblock from CPAC.registration import ( create_fsl_flirt_linear_reg, create_fsl_fnirt_nonlinear_reg, - create_wf_calculate_ants_warp + create_wf_calculate_ants_warp, ) - from CPAC.registration.registration import apply_transform - from CPAC.utils.datasource import ( resolve_resolution, - create_anat_datasource, - create_check_for_s3_node -) - -from CPAC.longitudinal_pipeline.longitudinal_preproc import ( - subject_specific_template ) - -from CPAC.utils import find_files, function -from CPAC.utils.outputs import Outputs +from CPAC.utils.interfaces.datasink import DataSink +from CPAC.utils.interfaces.function import Function from CPAC.utils.strategy import Strategy -from CPAC.utils.utils import ( - check_config_resources, - check_prov_for_regtool -) +from CPAC.utils.utils import check_config_resources, check_prov_for_regtool -logger = logging.getLogger('nipype.workflow') +logger = logging.getLogger("nipype.workflow") @nodeblock( @@ -80,24 +58,28 @@ outputs=["space-T1w_desc-brain_mask"], ) def mask_T1w_longitudinal_template(wf, cfg, strat_pool, pipe_num, opt=None): + brain_mask = pe.Node( + interface=fsl.maths.MathsCommand(), + name=f"longitudinal_anatomical_brain_mask_" f"{pipe_num}", + ) + brain_mask.inputs.args = "-bin" - brain_mask = pe.Node(interface=fsl.maths.MathsCommand(), - name=f'longitudinal_anatomical_brain_mask_' - f'{pipe_num}') - brain_mask.inputs.args = '-bin' - - node, out = strat_pool.get_data('desc-brain_T1w') - wf.connect(node, out, brain_mask, 'in_file') + node, out = strat_pool.get_data("desc-brain_T1w") + 
wf.connect(node, out, brain_mask, "in_file") - outputs = { - 'space-T1w_desc-brain_mask': (brain_mask, 'out_file') - } + outputs = {"space-T1w_desc-brain_mask": (brain_mask, "out_file")} return (wf, outputs) -def create_datasink(datasink_name, config, subject_id, session_id='', - strat_name='', map_node_iterfield=None): +def create_datasink( + datasink_name, + config, + subject_id, + session_id="", + strat_name="", + map_node_iterfield=None, +): """ Parameters @@ -114,8 +96,7 @@ def create_datasink(datasink_name, config, subject_id, session_id='', """ try: - encrypt_data = bool( - config.pipeline_setup['Amazon-AWS']['s3_encryption']) + encrypt_data = bool(config.pipeline_setup["Amazon-AWS"]["s3_encryption"]) except: encrypt_data = False @@ -123,58 +104,62 @@ def create_datasink(datasink_name, config, subject_id, session_id='', # Extract credentials path for output if it exists try: # Get path to creds file - creds_path = '' - if config.pipeline_setup['Amazon-AWS'][ - 'aws_output_bucket_credentials']: - creds_path = str(config.pipeline_setup['Amazon-AWS'][ - 'aws_output_bucket_credentials']) + creds_path = "" + if config.pipeline_setup["Amazon-AWS"]["aws_output_bucket_credentials"]: + creds_path = str( + config.pipeline_setup["Amazon-AWS"]["aws_output_bucket_credentials"] + ) creds_path = os.path.abspath(creds_path) - if config.pipeline_setup['output_directory'][ - 'path'].lower().startswith('s3://'): + if ( + config.pipeline_setup["output_directory"]["path"] + .lower() + .startswith("s3://") + ): # Test for s3 write access - s3_write_access = \ - aws_utils.test_bucket_access(creds_path, - config.pipeline_setup[ - 'output_directory']['path']) + s3_write_access = aws_utils.test_bucket_access( + creds_path, config.pipeline_setup["output_directory"]["path"] + ) if not s3_write_access: - raise Exception('Not able to write to bucket!') + raise Exception("Not able to write to bucket!") except Exception as e: - if config.pipeline_setup['output_directory'][ - 'path'].lower().startswith('s3://'): - err_msg = 'There was an error processing credentials or ' \ - 'accessing the S3 bucket. Check and try again.\n' \ - 'Error: %s' % e + if ( + config.pipeline_setup["output_directory"]["path"] + .lower() + .startswith("s3://") + ): + err_msg = ( + "There was an error processing credentials or " + "accessing the S3 bucket. 
Check and try again.\n" + "Error: %s" % e + ) raise Exception(err_msg) if map_node_iterfield is not None: ds = pe.MapNode( DataSink(infields=map_node_iterfield), - name='sinker_{}'.format(datasink_name), - iterfield=map_node_iterfield + name=f"sinker_{datasink_name}", + iterfield=map_node_iterfield, ) else: - ds = pe.Node( - DataSink(), - name='sinker_{}'.format(datasink_name) - ) + ds = pe.Node(DataSink(), name=f"sinker_{datasink_name}") - ds.inputs.base_directory = config.pipeline_setup['output_directory'][ - 'path'] + ds.inputs.base_directory = config.pipeline_setup["output_directory"]["path"] ds.inputs.creds_path = creds_path ds.inputs.encrypt_bucket_keys = encrypt_data ds.inputs.container = os.path.join( - 'pipeline_%s_%s' % ( - config.pipeline_setup['pipeline_name'], strat_name), - subject_id, session_id + "pipeline_%s_%s" % (config.pipeline_setup["pipeline_name"], strat_name), + subject_id, + session_id, ) return ds -def connect_anat_preproc_inputs(strat, anat_preproc, strat_name, - strat_nodes_list_list, workflow): +def connect_anat_preproc_inputs( + strat, anat_preproc, strat_name, strat_nodes_list_list, workflow +): """ Parameters ---------- @@ -196,26 +181,23 @@ def connect_anat_preproc_inputs(strat, anat_preproc, strat_name, strat_nodes_list_list : list a list of strat_nodes_list """ - new_strat = strat.fork() - tmp_node, out_key = new_strat['anatomical'] - workflow.connect(tmp_node, out_key, anat_preproc, 'inputspec.anat') + tmp_node, out_key = new_strat["anatomical"] + workflow.connect(tmp_node, out_key, anat_preproc, "inputspec.anat") - tmp_node, out_key = new_strat['template_cmass'] - workflow.connect(tmp_node, out_key, anat_preproc, - 'inputspec.template_cmass') + tmp_node, out_key = new_strat["template_cmass"] + workflow.connect(tmp_node, out_key, anat_preproc, "inputspec.template_cmass") new_strat.append_name(anat_preproc.name) - new_strat.update_resource_pool({ - 'anatomical_brain': ( - anat_preproc, 'outputspec.brain'), - 'anatomical_skull_leaf': ( - anat_preproc, 'outputspec.reorient'), - 'anatomical_brain_mask': ( - anat_preproc, 'outputspec.brain_mask'), - }) + new_strat.update_resource_pool( + { + "anatomical_brain": (anat_preproc, "outputspec.brain"), + "anatomical_skull_leaf": (anat_preproc, "outputspec.reorient"), + "anatomical_brain_mask": (anat_preproc, "outputspec.brain_mask"), + } + ) try: strat_nodes_list_list[strat_name].append(new_strat) @@ -239,10 +221,10 @@ def select_session(session, output_brains, warps): brain_path = None warp_path = None for brain_path in output_brains: - if f'{session}_' in brain_path: + if f"{session}_" in brain_path: break for warp_path in warps: - if f'{session}_' in warp_path: + if f"{session}_" in warp_path: break return (brain_path, warp_path) @@ -255,17 +237,16 @@ def select_session(session, output_brains, warps): outputs=["space-longitudinal_desc-brain_mask"], ) def mask_longitudinal_T1w_brain(wf, cfg, strat_pool, pipe_num, opt=None): - - brain_mask = pe.Node(interface=fsl.maths.MathsCommand(), - name=f'longitudinal_T1w_brain_mask_{pipe_num}') - brain_mask.inputs.args = '-bin' + brain_mask = pe.Node( + interface=fsl.maths.MathsCommand(), + name=f"longitudinal_T1w_brain_mask_{pipe_num}", + ) + brain_mask.inputs.args = "-bin" node, out = strat_pool.get_data("space-longitudinal_desc-brain_T1w") - wf.connect(node, out, brain_mask, 'in_file') + wf.connect(node, out, brain_mask, "in_file") - outputs = { - 'space-longitudinal_desc-brain_mask': (brain_mask, 'out_file') - } + outputs = {"space-longitudinal_desc-brain_mask": 
(brain_mask, "out_file")} return (wf, outputs) @@ -282,46 +263,43 @@ def mask_longitudinal_T1w_brain(wf, cfg, strat_pool, pipe_num, opt=None): ], outputs=["space-template_desc-brain_T1w"], ) -def warp_longitudinal_T1w_to_template(wf, cfg, strat_pool, pipe_num, - opt=None): - +def warp_longitudinal_T1w_to_template(wf, cfg, strat_pool, pipe_num, opt=None): xfm_prov = strat_pool.get_cpac_provenance( - 'from-longitudinal_to-template_mode-image_xfm') + "from-longitudinal_to-template_mode-image_xfm" + ) reg_tool = check_prov_for_regtool(xfm_prov) - num_cpus = cfg.pipeline_setup['system_config'][ - 'max_cores_per_participant'] + num_cpus = cfg.pipeline_setup["system_config"]["max_cores_per_participant"] - num_ants_cores = cfg.pipeline_setup['system_config']['num_ants_threads'] + num_ants_cores = cfg.pipeline_setup["system_config"]["num_ants_threads"] - apply_xfm = apply_transform(f'warp_longitudinal_to_T1template_{pipe_num}', - reg_tool, time_series=False, - num_cpus=num_cpus, - num_ants_cores=num_ants_cores) + apply_xfm = apply_transform( + f"warp_longitudinal_to_T1template_{pipe_num}", + reg_tool, + time_series=False, + num_cpus=num_cpus, + num_ants_cores=num_ants_cores, + ) - if reg_tool == 'ants': + if reg_tool == "ants": apply_xfm.inputs.inputspec.interpolation = cfg.registration_workflows[ - 'anatomical_registration']['registration']['ANTs'][ - 'interpolation'] - elif reg_tool == 'fsl': + "anatomical_registration" + ]["registration"]["ANTs"]["interpolation"] + elif reg_tool == "fsl": apply_xfm.inputs.inputspec.interpolation = cfg.registration_workflows[ - 'anatomical_registration']['registration']['FSL-FNIRT'][ - 'interpolation'] + "anatomical_registration" + ]["registration"]["FSL-FNIRT"]["interpolation"] node, out = strat_pool.get_data("space-longitudinal_desc-brain_T1w") - wf.connect(node, out, apply_xfm, 'inputspec.input_image') + wf.connect(node, out, apply_xfm, "inputspec.input_image") node, out = strat_pool.get_data("T1w_brain_template") - wf.connect(node, out, apply_xfm, 'inputspec.reference') + wf.connect(node, out, apply_xfm, "inputspec.reference") - node, out = \ - strat_pool.get_data("from-longitudinal_to-template_mode-image_xfm") - wf.connect(node, out, apply_xfm, 'inputspec.transform') + node, out = strat_pool.get_data("from-longitudinal_to-template_mode-image_xfm") + wf.connect(node, out, apply_xfm, "inputspec.transform") - outputs = { - 'space-template_desc-brain_T1w': - (apply_xfm, 'outputspec.output_image') - } + outputs = {"space-template_desc-brain_T1w": (apply_xfm, "outputspec.output_image")} return (wf, outputs) @@ -357,50 +335,57 @@ def warp_longitudinal_T1w_to_template(wf, cfg, strat_pool, pipe_num, ], ) def warp_longitudinal_seg_to_T1w(wf, cfg, strat_pool, pipe_num, opt=None): - xfm_prov = strat_pool.get_cpac_provenance( - 'from-longitudinal_to-T1w_mode-image_desc-linear_xfm') + "from-longitudinal_to-T1w_mode-image_desc-linear_xfm" + ) reg_tool = check_prov_for_regtool(xfm_prov) - num_cpus = cfg.pipeline_setup['system_config'][ - 'max_cores_per_participant'] + num_cpus = cfg.pipeline_setup["system_config"]["max_cores_per_participant"] - num_ants_cores = cfg.pipeline_setup['system_config']['num_ants_threads'] + num_ants_cores = cfg.pipeline_setup["system_config"]["num_ants_threads"] outputs = {} labels = [ - 'CSF_mask', 'CSF_desc-preproc_mask', 'CSF_probseg', - 'GM_mask', 'GM_desc-preproc_mask', 'GM_probseg', - 'WM_mask', 'WM_desc-preproc_mask', 'WM_probseg', + "CSF_mask", + "CSF_desc-preproc_mask", + "CSF_probseg", + "GM_mask", + "GM_desc-preproc_mask", + 
"GM_probseg", + "WM_mask", + "WM_desc-preproc_mask", + "WM_probseg", ] for label in labels: - apply_xfm = apply_transform(f'warp_longitudinal_seg_to_T1w_{label}_' - f'{pipe_num}', reg_tool, - time_series=False, num_cpus=num_cpus, - num_ants_cores=num_ants_cores) - - if reg_tool == 'ants': - apply_xfm.inputs.inputspec.interpolation = \ - cfg.registration_workflows['anatomical_registration'][ - 'registration']['ANTs']['interpolation'] - elif reg_tool == 'fsl': - apply_xfm.inputs.inputspec.interpolation = \ - cfg.registration_workflows['anatomical_registration'][ - 'registration']['FSL-FNIRT']['interpolation'] + apply_xfm = apply_transform( + f"warp_longitudinal_seg_to_T1w_{label}_" f"{pipe_num}", + reg_tool, + time_series=False, + num_cpus=num_cpus, + num_ants_cores=num_ants_cores, + ) + + if reg_tool == "ants": + apply_xfm.inputs.inputspec.interpolation = cfg.registration_workflows[ + "anatomical_registration" + ]["registration"]["ANTs"]["interpolation"] + elif reg_tool == "fsl": + apply_xfm.inputs.inputspec.interpolation = cfg.registration_workflows[ + "anatomical_registration" + ]["registration"]["FSL-FNIRT"]["interpolation"] node, out = strat_pool.get_data("space-longitudinal_desc-brain_T1w") - wf.connect(node, out, apply_xfm, 'inputspec.input_image') + wf.connect(node, out, apply_xfm, "inputspec.input_image") node, out = strat_pool.get_data("T1w_brain_template") - wf.connect(node, out, apply_xfm, 'inputspec.reference') + wf.connect(node, out, apply_xfm, "inputspec.reference") - node, out = \ - strat_pool.get_data("from-longitudinal_to-template_mode-image_xfm") - wf.connect(node, out, apply_xfm, 'inputspec.transform') + node, out = strat_pool.get_data("from-longitudinal_to-template_mode-image_xfm") + wf.connect(node, out, apply_xfm, "inputspec.transform") - outputs[f'label-{label}'] = (apply_xfm, 'outputspec.output_image') + outputs[f"label-{label}"] = (apply_xfm, "outputspec.output_image") return (wf, outputs) @@ -421,42 +406,44 @@ def anat_longitudinal_wf(subject_id, sub_list, config): ------- None """ - # list of lists for every strategy session_id_list = [] session_wfs = {} cpac_dirs = [] - out_dir = config.pipeline_setup['output_directory']['path'] + out_dir = config.pipeline_setup["output_directory"]["path"] - orig_pipe_name = config.pipeline_setup['pipeline_name'] + orig_pipe_name = config.pipeline_setup["pipeline_name"] # Loop over the sessions to create the input for the longitudinal # algorithm for session in sub_list: - - unique_id = session['unique_id'] + unique_id = session["unique_id"] session_id_list.append(unique_id) try: - creds_path = session['creds_path'] - if creds_path and 'none' not in creds_path.lower(): + creds_path = session["creds_path"] + if creds_path and "none" not in creds_path.lower(): if os.path.exists(creds_path): input_creds_path = os.path.abspath(creds_path) else: - err_msg = 'Credentials path: "%s" for subject "%s" ' \ - 'session "%s" was not found. Check this path ' \ - 'and try again.' % (creds_path, subject_id, - unique_id) + err_msg = ( + 'Credentials path: "%s" for subject "%s" ' + 'session "%s" was not found. Check this path ' + "and try again." 
% (creds_path, subject_id, unique_id) + ) raise Exception(err_msg) else: input_creds_path = None except KeyError: input_creds_path = None - workflow = initialize_nipype_wf(config, sub_list[0], - # just grab the first one for the name - name="anat_longitudinal_pre-preproc") + workflow = initialize_nipype_wf( + config, + sub_list[0], + # just grab the first one for the name + name="anat_longitudinal_pre-preproc", + ) workflow, rpool = initiate_rpool(workflow, config, session) pipeline_blocks = build_anat_preproc_stack(rpool, config) @@ -468,9 +455,10 @@ def anat_longitudinal_wf(subject_id, sub_list, config): workflow.run() - cpac_dir = os.path.join(out_dir, f'pipeline_{orig_pipe_name}', - f'{subject_id}_{unique_id}') - cpac_dirs.append(os.path.join(cpac_dir, 'anat')) + cpac_dir = os.path.join( + out_dir, f"pipeline_{orig_pipe_name}", f"{subject_id}_{unique_id}" + ) + cpac_dirs.append(os.path.join(cpac_dir, "anat")) # Now we have all the anat_preproc set up for every session # loop over the different anat preproc strategies @@ -479,88 +467,108 @@ def anat_longitudinal_wf(subject_id, sub_list, config): for cpac_dir in cpac_dirs: if os.path.isdir(cpac_dir): for filename in os.listdir(cpac_dir): - if 'T1w.nii' in filename: - for tag in filename.split('_'): - if 'desc-' in tag and 'brain' in tag: + if "T1w.nii" in filename: + for tag in filename.split("_"): + if "desc-" in tag and "brain" in tag: if tag not in strats_brain_dct: strats_brain_dct[tag] = [] - strats_brain_dct[tag].append(os.path.join(cpac_dir, - filename)) + strats_brain_dct[tag].append( + os.path.join(cpac_dir, filename) + ) if tag not in strats_head_dct: strats_head_dct[tag] = [] - head_file = filename.replace(tag, 'desc-reorient') - strats_head_dct[tag].append(os.path.join(cpac_dir, - head_file)) + head_file = filename.replace(tag, "desc-reorient") + strats_head_dct[tag].append( + os.path.join(cpac_dir, head_file) + ) for strat in strats_brain_dct.keys(): + wf = initialize_nipype_wf( + config, + sub_list[0], + # just grab the first one for the name + name=f"template_node_{strat}", + ) - wf = initialize_nipype_wf(config, sub_list[0], - # just grab the first one for the name - name=f"template_node_{strat}") - - config.pipeline_setup[ - 'pipeline_name'] = f'longitudinal_{orig_pipe_name}' + config.pipeline_setup["pipeline_name"] = f"longitudinal_{orig_pipe_name}" - template_node_name = f'longitudinal_anat_template_{strat}' + template_node_name = f"longitudinal_anat_template_{strat}" # This node will generate the longitudinal template (the functions are # in longitudinal_preproc) # Later other algorithms could be added to calculate it, like the # multivariate template from ANTS # It would just require to change it here. 
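To make that swap concrete, here is a minimal sketch of how the template node is parameterized. The option names mirror the longitudinal_template_generation settings used just below; the values, session IDs, and file paths are hypothetical:

    # Minimal sketch of configuring the subject-specific template node.
    # subject_specific_template is the import shown earlier in this diff;
    # all option values, session IDs, and file paths here are hypothetical.
    template_node = subject_specific_template(workflow_name="anat_template_demo")
    template_node.inputs.set(
        avg_method="median",       # longitudinal_template_generation: average_method
        dof=12,                    # degrees of freedom for the per-session registration
        interp="trilinear",
        cost="corratio",
        convergence_threshold=-1,
        thread_pool=2,
        unique_id_list=["ses-1", "ses-2"],
    )
    template_node.inputs.input_brain_list = [
        "/out/sub-01_ses-1_desc-brain_T1w.nii.gz",
        "/out/sub-01_ses-2_desc-brain_T1w.nii.gz",
    ]
    template_node.inputs.input_skull_list = [
        "/out/sub-01_ses-1_desc-reorient_T1w.nii.gz",
        "/out/sub-01_ses-2_desc-reorient_T1w.nii.gz",
    ]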
-        template_node = subject_specific_template(
-            workflow_name=template_node_name
-        )
+        template_node = subject_specific_template(workflow_name=template_node_name)
 
         template_node.inputs.set(
-            avg_method=config.longitudinal_template_generation[
-                'average_method'],
-            dof=config.longitudinal_template_generation['dof'],
-            interp=config.longitudinal_template_generation['interp'],
-            cost=config.longitudinal_template_generation['cost'],
+            avg_method=config.longitudinal_template_generation["average_method"],
+            dof=config.longitudinal_template_generation["dof"],
+            interp=config.longitudinal_template_generation["interp"],
+            cost=config.longitudinal_template_generation["cost"],
             convergence_threshold=config.longitudinal_template_generation[
-                'convergence_threshold'],
-            thread_pool=config.longitudinal_template_generation[
-                'thread_pool'],
-            unique_id_list=list(session_wfs.keys())
+                "convergence_threshold"
+            ],
+            thread_pool=config.longitudinal_template_generation["thread_pool"],
+            unique_id_list=list(session_wfs.keys()),
         )
 
         template_node.inputs.input_brain_list = strats_brain_dct[strat]
         template_node.inputs.input_skull_list = strats_head_dct[strat]
 
-        long_id = f'longitudinal_{subject_id}_strat-{strat}'
+        long_id = f"longitudinal_{subject_id}_strat-{strat}"
 
         wf, rpool = initiate_rpool(wf, config, part_id=long_id)
 
-        rpool.set_data("space-longitudinal_desc-brain_T1w",
-                       template_node, 'brain_template', {},
-                       "", template_node_name)
+        rpool.set_data(
+            "space-longitudinal_desc-brain_T1w",
+            template_node,
+            "brain_template",
+            {},
+            "",
+            template_node_name,
+        )
 
-        rpool.set_data("space-longitudinal_desc-brain_T1w-template",
-                       template_node, 'brain_template', {},
-                       "", template_node_name)
+        rpool.set_data(
+            "space-longitudinal_desc-brain_T1w-template",
+            template_node,
+            "brain_template",
+            {},
+            "",
+            template_node_name,
+        )
 
-        rpool.set_data("space-longitudinal_desc-reorient_T1w",
-                       template_node, 'skull_template', {},
-                       "", template_node_name)
+        rpool.set_data(
+            "space-longitudinal_desc-reorient_T1w",
+            template_node,
+            "skull_template",
+            {},
+            "",
+            template_node_name,
+        )
 
-        rpool.set_data("space-longitudinal_desc-reorient_T1w-template",
-                       template_node, 'skull_template', {},
-                       "", template_node_name)
+        rpool.set_data(
+            "space-longitudinal_desc-reorient_T1w-template",
+            template_node,
+            "skull_template",
+            {},
+            "",
+            template_node_name,
+        )
 
         pipeline_blocks = [mask_longitudinal_T1w_brain]
 
-        pipeline_blocks = build_T1w_registration_stack(rpool, config,
-                                                       pipeline_blocks)
+        pipeline_blocks = build_T1w_registration_stack(rpool, config, pipeline_blocks)
 
-        pipeline_blocks = build_segmentation_stack(rpool, config,
-                                                   pipeline_blocks)
+        pipeline_blocks = build_segmentation_stack(rpool, config, pipeline_blocks)
 
         wf = connect_pipeline(wf, config, rpool, pipeline_blocks)
 
-        excl = ['space-longitudinal_desc-brain_T1w',
-                'space-longitudinal_desc-reorient_T1w',
-                'space-longitudinal_desc-brain_mask']
+        excl = [
+            "space-longitudinal_desc-brain_T1w",
+            "space-longitudinal_desc-reorient_T1w",
+            "space-longitudinal_desc-brain_mask",
+        ]
         rpool.gather_pipes(wf, config, add_excl=excl)
 
         # this is going to run multiple times!
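The next hunk wires the session-selection helper defined earlier in this file into the per-session copy step. As a standalone illustration of what that helper returns, a minimal sketch with hypothetical file names:

    # Illustration only: select_session picks the per-session template outputs
    # by substring match on the session ID; the paths below are hypothetical.
    output_brains = [
        "/out/longitudinal_sub-01_ses-1_desc-brain_T1w.nii.gz",
        "/out/longitudinal_sub-01_ses-2_desc-brain_T1w.nii.gz",
    ]
    warps = [
        "/out/sub-01_ses-1_from-T1w_to-longitudinal_xfm.mat",
        "/out/sub-01_ses-2_from-T1w_to-longitudinal_xfm.mat",
    ]
    brain_path, warp_path = select_session("ses-2", output_brains, warps)
    # brain_path -> ".../longitudinal_sub-01_ses-2_desc-brain_T1w.nii.gz"
    # warp_path  -> ".../sub-01_ses-2_from-T1w_to-longitudinal_xfm.mat"
    # Note: if no entry matches, the loop falls through and the last list
    # element is returned, since the loop variable keeps its final value.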
@@ -568,21 +576,21 @@ def anat_longitudinal_wf(subject_id, sub_list, config): wf.run() # now, just write out a copy of the above to each session - config.pipeline_setup['pipeline_name'] = orig_pipe_name + config.pipeline_setup["pipeline_name"] = orig_pipe_name for session in sub_list: - - unique_id = session['unique_id'] + unique_id = session["unique_id"] try: - creds_path = session['creds_path'] - if creds_path and 'none' not in creds_path.lower(): + creds_path = session["creds_path"] + if creds_path and "none" not in creds_path.lower(): if os.path.exists(creds_path): input_creds_path = os.path.abspath(creds_path) else: - err_msg = 'Credentials path: "%s" for subject "%s" ' \ - 'session "%s" was not found. Check this path ' \ - 'and try again.' % (creds_path, subject_id, - unique_id) + err_msg = ( + 'Credentials path: "%s" for subject "%s" ' + 'session "%s" was not found. Check this path ' + "and try again." % (creds_path, subject_id, unique_id) + ) raise Exception(err_msg) else: input_creds_path = None @@ -593,56 +601,64 @@ def anat_longitudinal_wf(subject_id, sub_list, config): wf, rpool = initiate_rpool(wf, config, session) - config.pipeline_setup[ - 'pipeline_name'] = f'longitudinal_{orig_pipe_name}' - rpool = ingress_output_dir(config, rpool, long_id, - creds_path=input_creds_path) - - select_node_name = f'select_{unique_id}' - select_sess = pe.Node(Function(input_names=['session', - 'output_brains', - 'warps'], - output_names=['brain_path', - 'warp_path'], - function=select_session), - name=select_node_name) + config.pipeline_setup["pipeline_name"] = f"longitudinal_{orig_pipe_name}" + rpool = ingress_output_dir( + config, rpool, long_id, creds_path=input_creds_path + ) + + select_node_name = f"select_{unique_id}" + select_sess = pe.Node( + Function( + input_names=["session", "output_brains", "warps"], + output_names=["brain_path", "warp_path"], + function=select_session, + ), + name=select_node_name, + ) select_sess.inputs.session = unique_id - wf.connect(template_node, 'output_brain_list', select_sess, - 'output_brains') - wf.connect(template_node, 'warp_list', select_sess, 'warps') + wf.connect(template_node, "output_brain_list", select_sess, "output_brains") + wf.connect(template_node, "warp_list", select_sess, "warps") - rpool.set_data("space-longitudinal_desc-brain_T1w", - select_sess, 'brain_path', {}, "", - select_node_name) + rpool.set_data( + "space-longitudinal_desc-brain_T1w", + select_sess, + "brain_path", + {}, + "", + select_node_name, + ) - rpool.set_data("from-T1w_to-longitudinal_mode-image_" - "desc-linear_xfm", - select_sess, 'warp_path', {}, "", - select_node_name) + rpool.set_data( + "from-T1w_to-longitudinal_mode-image_" "desc-linear_xfm", + select_sess, + "warp_path", + {}, + "", + select_node_name, + ) - config.pipeline_setup['pipeline_name'] = orig_pipe_name - excl = ['space-template_desc-brain_T1w', - 'space-T1w_desc-brain_mask'] + config.pipeline_setup["pipeline_name"] = orig_pipe_name + excl = ["space-template_desc-brain_T1w", "space-T1w_desc-brain_mask"] rpool.gather_pipes(wf, config, add_excl=excl) wf.run() # begin single-session stuff again for session in sub_list: - - unique_id = session['unique_id'] + unique_id = session["unique_id"] try: - creds_path = session['creds_path'] - if creds_path and 'none' not in creds_path.lower(): + creds_path = session["creds_path"] + if creds_path and "none" not in creds_path.lower(): if os.path.exists(creds_path): input_creds_path = os.path.abspath(creds_path) else: - err_msg = 'Credentials path: "%s" for subject "%s" 
' \ - 'session "%s" was not found. Check this path ' \ - 'and try again.' % (creds_path, subject_id, - unique_id) + err_msg = ( + 'Credentials path: "%s" for subject "%s" ' + 'session "%s" was not found. Check this path ' + "and try again." % (creds_path, subject_id, unique_id) + ) raise Exception(err_msg) else: input_creds_path = None @@ -653,8 +669,10 @@ def anat_longitudinal_wf(subject_id, sub_list, config): wf, rpool = initiate_rpool(wf, config, session) - pipeline_blocks = [warp_longitudinal_T1w_to_template, - warp_longitudinal_seg_to_T1w] + pipeline_blocks = [ + warp_longitudinal_T1w_to_template, + warp_longitudinal_seg_to_T1w, + ] wf = connect_pipeline(wf, config, rpool, pipeline_blocks) @@ -665,8 +683,6 @@ def anat_longitudinal_wf(subject_id, sub_list, config): wf.run() - - # TODO check: # 1 func alone works # 2 anat + func works, pass anat strategy list? @@ -687,42 +703,43 @@ def func_preproc_longitudinal_wf(subject_id, sub_list, config): strat_list_ses_list : list of list a list of strategies; within each strategy, a list of sessions """ - - datasink = pe.Node(nio.DataSink(), name='sinker') - datasink.inputs.base_directory = \ - config.pipeline_setup['working_directory']['path'] + datasink = pe.Node(nio.DataSink(), name="sinker") + datasink.inputs.base_directory = config.pipeline_setup["working_directory"]["path"] session_id_list = [] ses_list_strat_list = {} - workflow_name = 'func_preproc_longitudinal_' + str(subject_id) + workflow_name = "func_preproc_longitudinal_" + str(subject_id) workflow = pe.Workflow(name=workflow_name) - workflow.base_dir = config.pipeline_setup['working_directory']['path'] - workflow.config['execution'] = { - 'hash_method': 'timestamp', - 'crashdump_dir': os.path.abspath( - config.pipeline_setup['crash_directory']['path']) + workflow.base_dir = config.pipeline_setup["working_directory"]["path"] + workflow.config["execution"] = { + "hash_method": "timestamp", + "crashdump_dir": os.path.abspath( + config.pipeline_setup["crash_directory"]["path"] + ), } for sub_dict in sub_list: - if 'func' in sub_dict or 'rest' in sub_dict: - if 'func' in sub_dict: - func_paths_dict = sub_dict['func'] + if "func" in sub_dict or "rest" in sub_dict: + if "func" in sub_dict: + func_paths_dict = sub_dict["func"] else: - func_paths_dict = sub_dict['rest'] + func_paths_dict = sub_dict["rest"] - unique_id = sub_dict['unique_id'] + unique_id = sub_dict["unique_id"] session_id_list.append(unique_id) try: - creds_path = sub_dict['creds_path'] - if creds_path and 'none' not in creds_path.lower(): + creds_path = sub_dict["creds_path"] + if creds_path and "none" not in creds_path.lower(): if os.path.exists(creds_path): input_creds_path = os.path.abspath(creds_path) else: - err_msg = 'Credentials path: "%s" for subject "%s" was not ' \ - 'found. Check this path and try again.' % ( - creds_path, subject_id) + err_msg = ( + 'Credentials path: "%s" for subject "%s" was not ' + "found. Check this path and try again." 
+ % (creds_path, subject_id) + ) raise Exception(err_msg) else: input_creds_path = None @@ -731,7 +748,7 @@ def func_preproc_longitudinal_wf(subject_id, sub_list, config): strat = Strategy() strat_list = [strat] - node_suffix = '_'.join([subject_id, unique_id]) + node_suffix = "_".join([subject_id, unique_id]) # Functional Ingress Workflow # add optional flag @@ -742,24 +759,23 @@ def func_preproc_longitudinal_wf(subject_id, sub_list, config): sub_dict, subject_id, input_creds_path, - node_suffix) + node_suffix, + ) # Functional Initial Prep Workflow - workflow, strat_list = connect_func_init(workflow, strat_list, - config, node_suffix) + workflow, strat_list = connect_func_init( + workflow, strat_list, config, node_suffix + ) # Functional Image Preprocessing Workflow - workflow, strat_list = connect_func_preproc(workflow, strat_list, - config, node_suffix) + workflow, strat_list = connect_func_preproc( + workflow, strat_list, config, node_suffix + ) # Distortion Correction - workflow, strat_list = connect_distortion_correction(workflow, - strat_list, - config, - diff, - blip, - fmap_rp_list, - node_suffix) + workflow, strat_list = connect_distortion_correction( + workflow, strat_list, config, diff, blip, fmap_rp_list, node_suffix + ) ses_list_strat_list[node_suffix] = strat_list @@ -771,10 +787,10 @@ def func_preproc_longitudinal_wf(subject_id, sub_list, config): # TODO rename and reorganize dict # TODO update strat name strat_list_ses_list = {} - strat_list_ses_list['func_default'] = [] + strat_list_ses_list["func_default"] = [] for sub_ses_id, strat_nodes_list in ses_list_strat_list.items(): - strat_list_ses_list['func_default'].append(strat_nodes_list[0]) + strat_list_ses_list["func_default"].append(strat_nodes_list[0]) workflow.run() @@ -795,16 +811,15 @@ def merge_func_preproc(working_directory): skull_list : list a list of func preprocessed skull """ - brain_list = [] skull_list = [] for dirpath, dirnames, filenames in os.walk(working_directory): for f in filenames: - if 'func_get_preprocessed_median' in dirpath and '.nii.gz' in f: + if "func_get_preprocessed_median" in dirpath and ".nii.gz" in f: filepath = os.path.join(dirpath, f) brain_list.append(filepath) - if 'func_get_motion_correct_median' in dirpath and '.nii.gz' in f: + if "func_get_motion_correct_median" in dirpath and ".nii.gz" in f: filepath = os.path.join(dirpath, f) skull_list.append(filepath) @@ -815,70 +830,81 @@ def merge_func_preproc(working_directory): def register_func_longitudinal_template_to_standard( - longitudinal_template_node, c, workflow, strat_init, strat_name): - sub_mem_gb, num_cores_per_sub, num_ants_cores, num_omp_cores = \ + longitudinal_template_node, c, workflow, strat_init, strat_name +): + sub_mem_gb, num_cores_per_sub, num_ants_cores, num_omp_cores = ( check_config_resources(c) + ) strat_init_new = strat_init.fork() - strat_init_new.update_resource_pool({ - 'functional_preprocessed_median': ( - longitudinal_template_node, 'brain_template'), - 'motion_correct_median': ( - longitudinal_template_node, 'skull_template') - }) + strat_init_new.update_resource_pool( + { + "functional_preprocessed_median": ( + longitudinal_template_node, + "brain_template", + ), + "motion_correct_median": (longitudinal_template_node, "skull_template"), + } + ) strat_list = [strat_init_new] new_strat_list = [] - regOption = c.anatomical_preproc[ - 'registration_workflow' - ]['registration']['using'] - - if 'FSL' in regOption: + regOption = c.anatomical_preproc["registration_workflow"]["registration"]["using"] + if "FSL" 
in regOption:
        for num_strat, strat in enumerate(strat_list):
-
            flirt_reg_func_mni = create_fsl_flirt_linear_reg(
-                'func_mni_flirt_register_%s_%d' % (strat_name, num_strat)
+                "func_mni_flirt_register_%s_%d" % (strat_name, num_strat)
            )

-            if c.functional_registration['2-func_registration_to_template'][
-                'FNIRT_pipelines']['interpolation'] not in ["trilinear",
-                                                            "sinc", "spline"]:
+            if c.functional_registration["2-func_registration_to_template"][
+                "FNIRT_pipelines"
+            ]["interpolation"] not in ["trilinear", "sinc", "spline"]:
                err_msg = 'The selected FSL interpolation method may be in the list of values: "trilinear", "sinc", "spline"'
                raise Exception(err_msg)

            # Input registration parameters
-            flirt_reg_func_mni.inputs.inputspec.interp = \
-                c.functional_registration['2-func_registration_to_template'][
-                    'FNIRT_pipelines']['interpolation']
+            flirt_reg_func_mni.inputs.inputspec.interp = c.functional_registration[
+                "2-func_registration_to_template"
+            ]["FNIRT_pipelines"]["interpolation"]

-            node, out_file = strat['functional_preprocessed_median']
-            workflow.connect(node, out_file,
-                             flirt_reg_func_mni, 'inputspec.input_brain')
+            node, out_file = strat["functional_preprocessed_median"]
+            workflow.connect(
+                node, out_file, flirt_reg_func_mni, "inputspec.input_brain"
+            )

            # pass the reference files
-            node, out_file = strat['template_brain_for_func_preproc']
-            workflow.connect(node, out_file, flirt_reg_func_mni,
-                             'inputspec.reference_brain')
+            node, out_file = strat["template_brain_for_func_preproc"]
+            workflow.connect(
+                node, out_file, flirt_reg_func_mni, "inputspec.reference_brain"
+            )

-            if 'ANTS' in regOption:
+            if "ANTS" in regOption:
                strat = strat.fork()
                new_strat_list.append(strat)

            strat.append_name(flirt_reg_func_mni.name)

-            strat.update_resource_pool({
-                'registration_method': 'FSL',
-                'func_longitudinal_to_mni_linear_xfm': (
-                    flirt_reg_func_mni, 'outputspec.linear_xfm'),
-                'mni_to_func_longitudinal_linear_xfm': (
-                    flirt_reg_func_mni, 'outputspec.invlinear_xfm'),
-                'func_longitudinal_template_to_standard': (
-                    flirt_reg_func_mni, 'outputspec.output_brain')
-            })
+            strat.update_resource_pool(
+                {
+                    "registration_method": "FSL",
+                    "func_longitudinal_to_mni_linear_xfm": (
+                        flirt_reg_func_mni,
+                        "outputspec.linear_xfm",
+                    ),
+                    "mni_to_func_longitudinal_linear_xfm": (
+                        flirt_reg_func_mni,
+                        "outputspec.invlinear_xfm",
+                    ),
+                    "func_longitudinal_template_to_standard": (
+                        flirt_reg_func_mni,
+                        "outputspec.output_brain",
+                    ),
+                }
+            )

    strat_list += new_strat_list

@@ -889,51 +915,52 @@ def register_func_longitudinal_template_to_standard(
    except AttributeError:
        fsl_linear_reg_only = [0]

-    if 'FSL' in regOption and 0 in fsl_linear_reg_only:
-
+    if "FSL" in regOption and 0 in fsl_linear_reg_only:
        for num_strat, strat in enumerate(strat_list):
-
-            if strat.get('registration_method') == 'FSL':
-
+            if strat.get("registration_method") == "FSL":
                fnirt_reg_func_mni = create_fsl_fnirt_nonlinear_reg(
-                    'func_mni_fnirt_register_%s_%d' % (strat_name, num_strat)
+                    "func_mni_fnirt_register_%s_%d" % (strat_name, num_strat)
                )

                # brain input
-                node, out_file = strat['functional_preprocessed_median']
-                workflow.connect(node, out_file,
-                                 fnirt_reg_func_mni, 'inputspec.input_brain')
+                node, out_file = strat["functional_preprocessed_median"]
+                workflow.connect(
+                    node, out_file, fnirt_reg_func_mni, "inputspec.input_brain"
+                )

                # brain reference
-                node, out_file = strat['template_brain_for_func_preproc']
-                workflow.connect(node, out_file,
-                                 fnirt_reg_func_mni,
-                                 'inputspec.reference_brain')
+                node, out_file = strat["template_brain_for_func_preproc"]
+                workflow.connect(
+                    node, out_file, fnirt_reg_func_mni, "inputspec.reference_brain"
+                )

                # skull input
-                node, out_file = strat['motion_correct_median']
-                workflow.connect(node, out_file,
-                                 fnirt_reg_func_mni, 'inputspec.input_skull')
+                node, out_file = strat["motion_correct_median"]
+                workflow.connect(
+                    node, out_file, fnirt_reg_func_mni, "inputspec.input_skull"
+                )

                # skull reference
-                node, out_file = strat['template_skull_for_func_preproc']
-                workflow.connect(node, out_file,
-                                 fnirt_reg_func_mni,
-                                 'inputspec.reference_skull')
+                node, out_file = strat["template_skull_for_func_preproc"]
+                workflow.connect(
+                    node, out_file, fnirt_reg_func_mni, "inputspec.reference_skull"
+                )

-                node, out_file = strat['func_longitudinal_to_mni_linear_xfm']
-                workflow.connect(node, out_file,
-                                 fnirt_reg_func_mni, 'inputspec.linear_aff')
+                node, out_file = strat["func_longitudinal_to_mni_linear_xfm"]
+                workflow.connect(
+                    node, out_file, fnirt_reg_func_mni, "inputspec.linear_aff"
+                )

-                node, out_file = strat['template_ref_mask']
-                workflow.connect(node, out_file,
-                                 fnirt_reg_func_mni, 'inputspec.ref_mask')
+                node, out_file = strat["template_ref_mask"]
+                workflow.connect(
+                    node, out_file, fnirt_reg_func_mni, "inputspec.ref_mask"
+                )

                # assign the FSL FNIRT config file specified in pipeline
                # config.yml
-                fnirt_reg_func_mni.inputs.inputspec.fnirt_config = \
-                    c.anatomical_preproc['registration_workflow']['registration'][
-                        'FSL-FNIRT']['fnirt_config']
+                fnirt_reg_func_mni.inputs.inputspec.fnirt_config = c.anatomical_preproc[
+                    "registration_workflow"
+                ]["registration"]["FSL-FNIRT"]["fnirt_config"]

                if 1 in fsl_linear_reg_only:
                    strat = strat.fork()
@@ -941,132 +968,143 @@ def register_func_longitudinal_template_to_standard(

                strat.append_name(fnirt_reg_func_mni.name)

-                strat.update_resource_pool({
-                    'func_longitudinal_to_mni_nonlinear_xfm': (
-                        fnirt_reg_func_mni, 'outputspec.nonlinear_xfm'),
-                    'func_longitudinal_template_to_standard': (
-                        fnirt_reg_func_mni, 'outputspec.output_brain')
-                }, override=True)
+                strat.update_resource_pool(
+                    {
+                        "func_longitudinal_to_mni_nonlinear_xfm": (
+                            fnirt_reg_func_mni,
+                            "outputspec.nonlinear_xfm",
+                        ),
+                        "func_longitudinal_template_to_standard": (
+                            fnirt_reg_func_mni,
+                            "outputspec.output_brain",
+                        ),
+                    },
+                    override=True,
+                )

    strat_list += new_strat_list

    new_strat_list = []

    for num_strat, strat in enumerate(strat_list):
-
        # or run ANTS anatomical-to-MNI registration instead
-        if 'ANTS' in regOption and \
-            strat.get('registration_method') != 'FSL':
-
-            ants_reg_func_mni = \
-                create_wf_calculate_ants_warp(
-                    'func_mni_ants_register_%s_%d' % (strat_name, num_strat),
-                    num_threads=num_ants_cores,
-                    reg_ants_skull=
-                    c.anatomical_preproc['registration_workflow'][
-                        'reg_with_skull']
-                )
+        if "ANTS" in regOption and strat.get("registration_method") != "FSL":
+            ants_reg_func_mni = create_wf_calculate_ants_warp(
+                "func_mni_ants_register_%s_%d" % (strat_name, num_strat),
+                num_threads=num_ants_cores,
+                reg_ants_skull=c.anatomical_preproc["registration_workflow"][
+                    "reg_with_skull"
+                ],
+            )

-            if c.functional_registration['2-func_registration_to_template'][
-                'ANTs_pipelines']['interpolation'] not in ['Linear',
-                                                           'BSpline',
-                                                           'LanczosWindowedSinc']:
+            if c.functional_registration["2-func_registration_to_template"][
+                "ANTs_pipelines"
+            ]["interpolation"] not in ["Linear", "BSpline", "LanczosWindowedSinc"]:
                err_msg = 'The selected ANTS interpolation method may be in the list of values: "Linear", "BSpline", "LanczosWindowedSinc"'
                raise
Exception(err_msg) # Input registration parameters - ants_reg_func_mni.inputs.inputspec.interp = \ - c.functional_registration['2-func_registration_to_template'][ - 'ANTs_pipelines']['interpolation'] + ants_reg_func_mni.inputs.inputspec.interp = c.functional_registration[ + "2-func_registration_to_template" + ]["ANTs_pipelines"]["interpolation"] # calculating the transform with the skullstripped is # reported to be better, but it requires very high # quality skullstripping. If skullstripping is imprecise # registration with skull is preferred - if c.anatomical_preproc['registration_workflow'][ - 'reg_with_skull']: - + if c.anatomical_preproc["registration_workflow"]["reg_with_skull"]: # get the skull-stripped anatomical from resource pool - node, out_file = strat['functional_preprocessed_median'] + node, out_file = strat["functional_preprocessed_median"] # pass the anatomical to the workflow - workflow.connect(node, out_file, - ants_reg_func_mni, 'inputspec.moving_brain') + workflow.connect( + node, out_file, ants_reg_func_mni, "inputspec.moving_brain" + ) # get the reorient skull-on anatomical from resource pool - node, out_file = strat['motion_correct_median'] + node, out_file = strat["motion_correct_median"] # pass the anatomical to the workflow - workflow.connect(node, out_file, - ants_reg_func_mni, 'inputspec.moving_skull') + workflow.connect( + node, out_file, ants_reg_func_mni, "inputspec.moving_skull" + ) # pass the reference file - node, out_file = strat['template_brain_for_func_preproc'] - workflow.connect(node, out_file, - ants_reg_func_mni, - 'inputspec.reference_brain') + node, out_file = strat["template_brain_for_func_preproc"] + workflow.connect( + node, out_file, ants_reg_func_mni, "inputspec.reference_brain" + ) # pass the reference file - node, out_file = strat['template_skull_for_func_preproc'] - workflow.connect(node, out_file, - ants_reg_func_mni, - 'inputspec.reference_skull') + node, out_file = strat["template_skull_for_func_preproc"] + workflow.connect( + node, out_file, ants_reg_func_mni, "inputspec.reference_skull" + ) else: + node, out_file = strat["functional_preprocessed_median"] - node, out_file = strat['functional_preprocessed_median'] - - workflow.connect(node, out_file, - ants_reg_func_mni, 'inputspec.moving_brain') + workflow.connect( + node, out_file, ants_reg_func_mni, "inputspec.moving_brain" + ) # pass the reference file - node, out_file = strat['template_brain_for_func_preproc'] - workflow.connect(node, out_file, - ants_reg_func_mni, - 'inputspec.reference_brain') + node, out_file = strat["template_brain_for_func_preproc"] + workflow.connect( + node, out_file, ants_reg_func_mni, "inputspec.reference_brain" + ) # pass the reference mask file - node, out_file = strat['template_brain_mask_for_func_preproc'] + node, out_file = strat["template_brain_mask_for_func_preproc"] workflow.connect( - node, out_file, - ants_reg_func_mni, 'inputspec.reference_mask' + node, out_file, ants_reg_func_mni, "inputspec.reference_mask" ) # pass the reference mask file - node, out_file = strat['functional_brain_mask'] - workflow.connect( - node, out_file, - ants_reg_func_mni, 'inputspec.moving_mask' - ) + node, out_file = strat["functional_brain_mask"] + workflow.connect(node, out_file, ants_reg_func_mni, "inputspec.moving_mask") - ants_reg_func_mni.inputs.inputspec.ants_para = \ - c.anatomical_preproc['registration_workflow']['registration'][ - 'ANTs']['T1_registration'] + ants_reg_func_mni.inputs.inputspec.ants_para = c.anatomical_preproc[ + "registration_workflow" + 
]["registration"]["ANTs"]["T1_registration"] ants_reg_func_mni.inputs.inputspec.fixed_image_mask = None strat.append_name(ants_reg_func_mni.name) - strat.update_resource_pool({ - 'registration_method': 'ANTS', - 'ants_initial_xfm': ( - ants_reg_func_mni, 'outputspec.ants_initial_xfm'), - 'ants_rigid_xfm': ( - ants_reg_func_mni, 'outputspec.ants_rigid_xfm'), - 'ants_affine_xfm': ( - ants_reg_func_mni, 'outputspec.ants_affine_xfm'), - 'func_longitudinal_to_mni_nonlinear_xfm': ( - ants_reg_func_mni, 'outputspec.warp_field'), - 'mni_to_func_longitudinal_nonlinear_xfm': ( - ants_reg_func_mni, 'outputspec.inverse_warp_field'), - 'func_longitudinal_to_mni_ants_composite_xfm': ( - ants_reg_func_mni, 'outputspec.composite_transform'), - 'func_longitudinal_template_to_standard': ( - ants_reg_func_mni, 'outputspec.normalized_output_brain') - }) + strat.update_resource_pool( + { + "registration_method": "ANTS", + "ants_initial_xfm": ( + ants_reg_func_mni, + "outputspec.ants_initial_xfm", + ), + "ants_rigid_xfm": (ants_reg_func_mni, "outputspec.ants_rigid_xfm"), + "ants_affine_xfm": ( + ants_reg_func_mni, + "outputspec.ants_affine_xfm", + ), + "func_longitudinal_to_mni_nonlinear_xfm": ( + ants_reg_func_mni, + "outputspec.warp_field", + ), + "mni_to_func_longitudinal_nonlinear_xfm": ( + ants_reg_func_mni, + "outputspec.inverse_warp_field", + ), + "func_longitudinal_to_mni_ants_composite_xfm": ( + ants_reg_func_mni, + "outputspec.composite_transform", + ), + "func_longitudinal_template_to_standard": ( + ants_reg_func_mni, + "outputspec.normalized_output_brain", + ), + } + ) strat_list += new_strat_list - ''' + """ # Func -> T1 Registration (Initial Linear Reg) workflow, strat_list, diff_complete = connect_func_to_anat_init_reg(workflow, strat_list, c) @@ -1075,13 +1113,13 @@ def register_func_longitudinal_template_to_standard( # Func -> T1/EPI Template workflow, strat_list = connect_func_to_template_reg(workflow, strat_list, c) - ''' + """ return workflow, strat_list def func_longitudinal_template_wf(subject_id, strat_list, config): - ''' + """ Parameters ---------- subject_id : string @@ -1094,74 +1132,106 @@ def func_longitudinal_template_wf(subject_id, strat_list, config): Returns ------- None - ''' - - workflow_name = 'func_longitudinal_template_' + str(subject_id) + """ + workflow_name = "func_longitudinal_template_" + str(subject_id) workflow = pe.Workflow(name=workflow_name) - workflow.base_dir = config.pipeline_setup['working_directory']['path'] - workflow.config['execution'] = { - 'hash_method': 'timestamp', - 'crashdump_dir': os.path.abspath( - config.pipeline_setup['crash_directory']['path']) + workflow.base_dir = config.pipeline_setup["working_directory"]["path"] + workflow.config["execution"] = { + "hash_method": "timestamp", + "crashdump_dir": os.path.abspath( + config.pipeline_setup["crash_directory"]["path"] + ), } # strat_nodes_list = strat_list['func_default'] strat_init = Strategy() templates_for_resampling = [ - (config.resolution_for_func_preproc, - config.template_brain_only_for_func, - 'template_brain_for_func_preproc', 'resolution_for_func_preproc'), - (config.resolution_for_func_preproc, config.template_skull_for_func, - 'template_skull_for_func_preproc', 'resolution_for_func_preproc'), - (config.resolution_for_func_preproc, config.ref_mask_for_func, - 'template_ref_mask', 'resolution_for_func_preproc'), + ( + config.resolution_for_func_preproc, + config.template_brain_only_for_func, + "template_brain_for_func_preproc", + "resolution_for_func_preproc", + ), + ( + 
config.resolution_for_func_preproc, + config.template_skull_for_func, + "template_skull_for_func_preproc", + "resolution_for_func_preproc", + ), + ( + config.resolution_for_func_preproc, + config.ref_mask_for_func, + "template_ref_mask", + "resolution_for_func_preproc", + ), # TODO check float resolution - (config.resolution_for_func_preproc, - config.functional_registration['2-func_registration_to_template'][ - 'target_template']['EPI_template']['template_epi'], - 'template_epi', 'resolution_for_func_preproc'), - (config.resolution_for_func_derivative, - config.functional_registration['2-func_registration_to_template'][ - 'target_template']['EPI_template']['template_epi'], - 'template_epi_derivative', 'resolution_for_func_derivative'), - (config.resolution_for_func_derivative, - config.template_brain_only_for_func, - 'template_brain_for_func_derivative', 'resolution_for_func_preproc'), ( - config.resolution_for_func_derivative, config.template_skull_for_func, - 'template_skull_for_func_derivative', 'resolution_for_func_preproc'), + config.resolution_for_func_preproc, + config.functional_registration["2-func_registration_to_template"][ + "target_template" + ]["EPI_template"]["template_epi"], + "template_epi", + "resolution_for_func_preproc", + ), + ( + config.resolution_for_func_derivative, + config.functional_registration["2-func_registration_to_template"][ + "target_template" + ]["EPI_template"]["template_epi"], + "template_epi_derivative", + "resolution_for_func_derivative", + ), + ( + config.resolution_for_func_derivative, + config.template_brain_only_for_func, + "template_brain_for_func_derivative", + "resolution_for_func_preproc", + ), + ( + config.resolution_for_func_derivative, + config.template_skull_for_func, + "template_skull_for_func_derivative", + "resolution_for_func_preproc", + ), ] for resolution, template, template_name, tag in templates_for_resampling: - resampled_template = pe.Node(Function( - input_names=['resolution', 'template', 'template_name', 'tag'], - output_names=['resampled_template'], - function=resolve_resolution, - as_module=True), - name='resampled_' + template_name) + resampled_template = pe.Node( + Function( + input_names=["resolution", "template", "template_name", "tag"], + output_names=["resampled_template"], + function=resolve_resolution, + as_module=True, + ), + name="resampled_" + template_name, + ) resampled_template.inputs.resolution = resolution resampled_template.inputs.template = template resampled_template.inputs.template_name = template_name resampled_template.inputs.tag = tag - strat_init.update_resource_pool({ - template_name: (resampled_template, 'resampled_template') - }) + strat_init.update_resource_pool( + {template_name: (resampled_template, "resampled_template")} + ) merge_func_preproc_node = pe.Node( - Function(input_names=['working_directory'], - output_names=['brain_list', 'skull_list'], - function=merge_func_preproc, - as_module=True), - name='merge_func_preproc') + Function( + input_names=["working_directory"], + output_names=["brain_list", "skull_list"], + function=merge_func_preproc, + as_module=True, + ), + name="merge_func_preproc", + ) - merge_func_preproc_node.inputs.working_directory = \ - config.pipeline_setup['working_directory']['path'] + merge_func_preproc_node.inputs.working_directory = config.pipeline_setup[ + "working_directory" + ]["path"] template_node = subject_specific_template( - workflow_name='subject_specific_func_template_' + subject_id + workflow_name="subject_specific_func_template_" + subject_id ) 
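# Aside: a minimal illustrative sketch (not part of this patch) of the
# Strategy resource-pool convention the reformatted code above relies on.
# Each pool entry maps a resource name to a (node, output_name) tuple;
# producers register node outputs with update_resource_pool(), and
# consumers unpack the tuple to wire it into workflow.connect(). The
# variable names below are taken from this file, but the pairing shown is
# a simplified sketch, not a verbatim excerpt:
#
#     # producer side: register a node output under a resource name
#     strat_init.update_resource_pool(
#         {"template_brain_for_func_preproc": (resampled_template,
#                                              "resampled_template")}
#     )
#
#     # consumer side: unpack (node, output_name) and connect it onward
#     node, out_file = strat_init["template_brain_for_func_preproc"]
#     workflow.connect(node, out_file, flirt_reg_func_mni,
#                      "inputspec.input_brain")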
template_node.inputs.set( @@ -1173,20 +1243,16 @@ def func_longitudinal_template_wf(subject_id, strat_list, config): thread_pool=config.longitudinal_template_thread_pool, ) - workflow.connect(merge_func_preproc_node, 'brain_list', - template_node, 'input_brain_list') + workflow.connect( + merge_func_preproc_node, "brain_list", template_node, "input_brain_list" + ) - workflow.connect(merge_func_preproc_node, 'skull_list', - template_node, 'input_skull_list') + workflow.connect( + merge_func_preproc_node, "skull_list", template_node, "input_skull_list" + ) workflow, strat_list = register_func_longitudinal_template_to_standard( - template_node, - config, - workflow, - strat_init, - 'default' + template_node, config, workflow, strat_init, "default" ) workflow.run() - - return diff --git a/CPAC/pipeline/engine.py b/CPAC/pipeline/engine.py index 09be47823a..3b7451d8b8 100644 --- a/CPAC/pipeline/engine.py +++ b/CPAC/pipeline/engine.py @@ -17,52 +17,58 @@ import ast import copy import hashlib -import json from itertools import chain +import json import logging import os import re -from typing import Any, Optional, Union +from typing import Optional, Union import warnings +from nipype import config # pylint: disable=wrong-import-order +from nipype.interfaces.utility import Rename # pylint: disable=wrong-import-order -from CPAC.pipeline import \ - nipype_pipeline_engine as pe # pylint: disable=ungrouped-imports -from nipype import config, logging # pylint: disable=wrong-import-order -from CPAC.pipeline.nodeblock import NodeBlockFunction # pylint: disable=ungrouped-imports -from nipype.interfaces.utility import \ - Rename # pylint: disable=wrong-import-order from CPAC.image_utils.spatial_smoothing import spatial_smoothing -from CPAC.image_utils.statistical_transforms import z_score_standardize, \ - fisher_z_score_standardize +from CPAC.image_utils.statistical_transforms import ( + fisher_z_score_standardize, + z_score_standardize, +) +from CPAC.pipeline import ( + nipype_pipeline_engine as pe, # pylint: disable=ungrouped-imports +) from CPAC.pipeline.check_outputs import ExpectedOutputs +from CPAC.pipeline.nodeblock import ( + NodeBlockFunction, # pylint: disable=ungrouped-imports +) from CPAC.pipeline.utils import MOVEMENT_FILTER_KEYS, name_fork, source_set from CPAC.registration.registration import transform_derivative +from CPAC.resources.templates.lookup_table import lookup_identifier from CPAC.utils.bids_utils import res_in_filename from CPAC.utils.datasource import ( create_anat_datasource, create_func_datasource, - ingress_func_metadata, create_general_datasource, - resolve_resolution + ingress_func_metadata, + resolve_resolution, ) -from CPAC.utils.interfaces.function import Function from CPAC.utils.interfaces.datasink import DataSink -from CPAC.utils.monitoring import getLogger, LOGTAIL, \ - WARNING_FREESURFER_OFF_WITH_DATA +from CPAC.utils.interfaces.function import Function +from CPAC.utils.monitoring import getLogger, LOGTAIL, WARNING_FREESURFER_OFF_WITH_DATA from CPAC.utils.outputs import Outputs from CPAC.utils.typing import LIST_OR_STR, TUPLE -from CPAC.utils.utils import check_prov_for_regtool, \ - create_id_string, get_last_prov_entry, read_json, write_output_json - -from CPAC.resources.templates.lookup_table import lookup_identifier +from CPAC.utils.utils import ( + check_prov_for_regtool, + create_id_string, + get_last_prov_entry, + read_json, + write_output_json, +) -logger = getLogger('nipype.workflow') +logger = getLogger("nipype.workflow") class ResourcePool: def __init__(self, 
rpool=None, name=None, cfg=None, pipe_list=None): - if not rpool: self.rpool = {} else: @@ -78,57 +84,71 @@ def __init__(self, rpool=None, name=None, cfg=None, pipe_list=None): if cfg: self.cfg = cfg - self.logdir = cfg.pipeline_setup['log_directory']['path'] - - self.num_cpus = cfg.pipeline_setup['system_config'][ - 'max_cores_per_participant'] - self.num_ants_cores = cfg.pipeline_setup['system_config'][ - 'num_ants_threads'] - - self.ants_interp = cfg.registration_workflows[ - 'functional_registration']['func_registration_to_template'][ - 'ANTs_pipelines']['interpolation'] - self.fsl_interp = cfg.registration_workflows[ - 'functional_registration']['func_registration_to_template'][ - 'FNIRT_pipelines']['interpolation'] - - self.func_reg = cfg.registration_workflows[ - 'functional_registration']['func_registration_to_template'][ - 'run'] - - self.run_smoothing = 'smoothed' in cfg.post_processing[ - 'spatial_smoothing']['output'] - self.smoothing_bool = cfg.post_processing['spatial_smoothing']['run'] - self.run_zscoring = 'z-scored' in cfg.post_processing[ - 'z-scoring']['output'] - self.zscoring_bool = cfg.post_processing['z-scoring']['run'] - self.fwhm = cfg.post_processing['spatial_smoothing']['fwhm'] - self.smooth_opts = cfg.post_processing['spatial_smoothing'][ - 'smoothing_method'] - - self.xfm = ['alff', 'desc-sm_alff', 'desc-zstd_alff', - 'desc-sm-zstd_alff', - 'falff', 'desc-sm_falff', 'desc-zstd_falff', - 'desc-sm-zstd_falff', - 'reho', 'desc-sm_reho', 'desc-zstd_reho', - 'desc-sm-zstd_reho'] + self.logdir = cfg.pipeline_setup["log_directory"]["path"] + + self.num_cpus = cfg.pipeline_setup["system_config"][ + "max_cores_per_participant" + ] + self.num_ants_cores = cfg.pipeline_setup["system_config"][ + "num_ants_threads" + ] + + self.ants_interp = cfg.registration_workflows["functional_registration"][ + "func_registration_to_template" + ]["ANTs_pipelines"]["interpolation"] + self.fsl_interp = cfg.registration_workflows["functional_registration"][ + "func_registration_to_template" + ]["FNIRT_pipelines"]["interpolation"] + + self.func_reg = cfg.registration_workflows["functional_registration"][ + "func_registration_to_template" + ]["run"] + + self.run_smoothing = ( + "smoothed" in cfg.post_processing["spatial_smoothing"]["output"] + ) + self.smoothing_bool = cfg.post_processing["spatial_smoothing"]["run"] + self.run_zscoring = "z-scored" in cfg.post_processing["z-scoring"]["output"] + self.zscoring_bool = cfg.post_processing["z-scoring"]["run"] + self.fwhm = cfg.post_processing["spatial_smoothing"]["fwhm"] + self.smooth_opts = cfg.post_processing["spatial_smoothing"][ + "smoothing_method" + ] + + self.xfm = [ + "alff", + "desc-sm_alff", + "desc-zstd_alff", + "desc-sm-zstd_alff", + "falff", + "desc-sm_falff", + "desc-zstd_falff", + "desc-sm-zstd_falff", + "reho", + "desc-sm_reho", + "desc-zstd_reho", + "desc-sm-zstd_reho", + ] def __repr__(self) -> str: - params = [f"{param}={getattr(self, param)}" for param in - ["rpool", "name", "cfg", "pipe_list"] if - getattr(self, param, None) is not None] + params = [ + f"{param}={getattr(self, param)}" + for param in ["rpool", "name", "cfg", "pipe_list"] + if getattr(self, param, None) is not None + ] return f'ResourcePool({", ".join(params)})' def __str__(self) -> str: if self.name: - return f'ResourcePool({self.name}): {list(self.rpool)}' - return f'ResourcePool: {list(self.rpool)}' + return f"ResourcePool({self.name}): {list(self.rpool)}" + return f"ResourcePool: {list(self.rpool)}" def append_name(self, name): self.name.append(name) - def 
back_propogate_template_name(self, wf, resource_idx: str, json_info: dict,
-                                     id_string: 'pe.Node') -> None:
+    def back_propogate_template_name(
+        self, wf, resource_idx: str, json_info: dict, id_string: "pe.Node"
+    ) -> None:
        """Find and apply the template name from a resource's provenance

        Parameters
@@ -143,27 +163,29 @@ def back_propogate_template_name(self, wf, resource_idx: str, json_info: dict,
        -------
        None
        """
-        if ('template' in resource_idx and self.check_rpool('derivatives-dir')):
-            if self.check_rpool('template'):
-                node, out = self.get_data('template')
-                wf.connect(node, out, id_string, 'template_desc')
-            elif 'Template' in json_info:
-                id_string.inputs.template_desc = json_info['Template']
-        elif ('template' in resource_idx and
-              len(json_info.get('CpacProvenance', [])) > 1):
-            for resource in source_set(json_info['CpacProvenance']):
-                source, value = resource.split(':', 1)
-                if value.startswith('template_'
-                                    ) and source != 'FSL-AFNI-bold-ref':
+        if "template" in resource_idx and self.check_rpool("derivatives-dir"):
+            if self.check_rpool("template"):
+                node, out = self.get_data("template")
+                wf.connect(node, out, id_string, "template_desc")
+            elif "Template" in json_info:
+                id_string.inputs.template_desc = json_info["Template"]
+        elif (
+            "template" in resource_idx and len(json_info.get("CpacProvenance", [])) > 1
+        ):
+            for resource in source_set(json_info["CpacProvenance"]):
+                source, value = resource.split(":", 1)
+                if value.startswith("template_") and source != "FSL-AFNI-bold-ref":
                    # 'FSL-AFNI-bold-ref' is currently allowed to be in
                    # a different space, so don't use it as the space for
                    # descendents
                    try:
-                        anscestor_json = list(self.rpool.get(source).items()
-                                              )[0][1].get('json', {})
-                        if 'Description' in anscestor_json:
+                        anscestor_json = list(self.rpool.get(source).items())[0][1].get(
+                            "json", {}
+                        )
+                        if "Description" in anscestor_json:
                            id_string.inputs.template_desc = anscestor_json[
-                                'Description']
+                                "Description"
+                            ]
                            return
                    except (IndexError, KeyError):
                        pass
@@ -196,17 +218,19 @@ def get_resources(self):
        return self.rpool.keys()

    def copy_rpool(self):
-        return ResourcePool(rpool=copy.deepcopy(self.get_entire_rpool()),
-                            name=self.name,
-                            cfg=self.cfg,
-                            pipe_list=copy.deepcopy(self.pipe_list))
+        return ResourcePool(
+            rpool=copy.deepcopy(self.get_entire_rpool()),
+            name=self.name,
+            cfg=self.cfg,
+            pipe_list=copy.deepcopy(self.pipe_list),
+        )

    @staticmethod
    def get_raw_label(resource: str) -> str:
        """Removes ``desc-*`` label"""
-        for tag in resource.split('_'):
-            if 'desc-' in tag:
-                resource = resource.replace(f'{tag}_', '')
+        for tag in resource.split("_"):
+            if "desc-" in tag:
+                resource = resource.replace(f"{tag}_", "")
                break
        return resource

@@ -214,32 +238,35 @@ def get_strat_info(self, prov, label=None, logdir=None):
        strat_info = {}
        for entry in prov:
            if isinstance(entry, list):
-                strat_info[entry[-1].split(':')[0]] = entry
+                strat_info[entry[-1].split(":")[0]] = entry
            elif isinstance(entry, str):
-                strat_info[entry.split(':')[0]] = entry.split(':')[1]
+                strat_info[entry.split(":")[0]] = entry.split(":")[1]
        if label:
            if not logdir:
                logdir = self.logdir
-            print(f'\n\nPrinting out strategy info for {label} in {logdir}\n')
-            write_output_json(strat_info, f'{label}_strat_info',
-                              indent=4, basedir=logdir)
+            print(f"\n\nPrinting out strategy info for {label} in {logdir}\n")
+            write_output_json(
+                strat_info, f"{label}_strat_info", indent=4, basedir=logdir
+            )

    def set_json_info(self, resource, pipe_idx, key, val):
-        #TODO: actually should probably be able to inititialize resource/pipe_idx
+        # TODO: actually should probably be able to initialize resource/pipe_idx
        if pipe_idx not in self.rpool[resource]:
-            raise Exception('\n[!] DEV: The pipeline/strat ID does not exist '
-                            f'in the resource pool.\nResource: {resource}'
-                            f'Pipe idx: {pipe_idx}\nKey: {key}\nVal: {val}\n')
+            raise Exception(
+                "\n[!] DEV: The pipeline/strat ID does not exist "
+                f"in the resource pool.\nResource: {resource}"
+                f"Pipe idx: {pipe_idx}\nKey: {key}\nVal: {val}\n"
+            )
        else:
-            if 'json' not in self.rpool[resource][pipe_idx]:
-                self.rpool[resource][pipe_idx]['json'] = {}
-            self.rpool[resource][pipe_idx]['json'][key] = val
+            if "json" not in self.rpool[resource][pipe_idx]:
+                self.rpool[resource][pipe_idx]["json"] = {}
+            self.rpool[resource][pipe_idx]["json"][key] = val

    def get_json_info(self, resource, pipe_idx, key):
-        #TODO: key checks
+        # TODO: key checks
        if not pipe_idx:
-            for pipe_idx, val in self.rpool[resource].items():
-                return val['json'][key]
+            for pipe_idx, val in self.rpool[resource].items():
+                return val["json"][key]
        return self.rpool[resource][pipe_idx][key]

    @staticmethod
@@ -251,97 +278,127 @@ def get_resource_from_prov(prov):
        if not len(prov):
            return None
        if isinstance(prov[-1], list):
-            return prov[-1][-1].split(':')[0]
+            return prov[-1][-1].split(":")[0]
        elif isinstance(prov[-1], str):
-            return prov[-1].split(':')[0]
+            return prov[-1].split(":")[0]

    def regressor_dct(self, cfg) -> dict:
        """Returns the regressor dictionary for the current strategy if
-        one exists. Raises KeyError otherwise."""
+        one exists. Raises KeyError otherwise.
+        """
        # pylint: disable=attribute-defined-outside-init
-        if hasattr(self, '_regressor_dct'):  # memoized
+        if hasattr(self, "_regressor_dct"):  # memoized
            # pylint: disable=access-member-before-definition
            return self._regressor_dct
-        key_error = KeyError("[!] No regressors in resource pool. \n\n"
-                             "Try turning on create_regressors or "
-                             "ingress_regressors.")
-        _nr = cfg['nuisance_corrections', '2-nuisance_regression']
-
-        if not hasattr(self, 'timeseries'):
-            if _nr['Regressors']:
-                self.regressors = {reg["Name"]: reg for reg in _nr['Regressors']}
+        key_error = KeyError(
+            "[!] No regressors in resource pool. \n\n"
+            "Try turning on create_regressors or "
+            "ingress_regressors."
+ ) + _nr = cfg["nuisance_corrections", "2-nuisance_regression"] + + if not hasattr(self, "timeseries"): + if _nr["Regressors"]: + self.regressors = {reg["Name"]: reg for reg in _nr["Regressors"]} else: self.regressors = [] - if self.check_rpool('parsed_regressors'): # ingressed regressor + if self.check_rpool("parsed_regressors"): # ingressed regressor # name regressor workflow without regressor_prov - strat_name = _nr['ingress_regressors']['Regressors']['Name'] + strat_name = _nr["ingress_regressors"]["Regressors"]["Name"] if strat_name in self.regressors: self._regressor_dct = self.regressors[strat_name] return self._regressor_dct - self.regressor_dct = _nr['ingress_regressors']['Regressors'] + self.regressor_dct = _nr["ingress_regressors"]["Regressors"] return self.regressor_dct - prov = self.get_cpac_provenance('desc-confounds_timeseries') - strat_name_components = prov[-1].split('_') - for _ in list(range(prov[-1].count('_'))): - reg_name = '_'.join(strat_name_components[-_:]) + prov = self.get_cpac_provenance("desc-confounds_timeseries") + strat_name_components = prov[-1].split("_") + for _ in list(range(prov[-1].count("_"))): + reg_name = "_".join(strat_name_components[-_:]) if reg_name in self.regressors: self._regressor_dct = self.regressors[reg_name] return self._regressor_dct raise key_error - def set_data(self, resource, node, output, json_info, pipe_idx, node_name, - fork=False, inject=False): + def set_data( + self, + resource, + node, + output, + json_info, + pipe_idx, + node_name, + fork=False, + inject=False, + ): json_info = json_info.copy() cpac_prov = [] - if 'CpacProvenance' in json_info: - cpac_prov = json_info['CpacProvenance'] + if "CpacProvenance" in json_info: + cpac_prov = json_info["CpacProvenance"] current_prov_list = list(cpac_prov) - new_prov_list = list(cpac_prov) # <---- making a copy, it was already a list + new_prov_list = list(cpac_prov) # <---- making a copy, it was already a list if not inject: - new_prov_list.append(f'{resource}:{node_name}') + new_prov_list.append(f"{resource}:{node_name}") try: res, new_pipe_idx = self.generate_prov_string(new_prov_list) except IndexError: - raise IndexError(f'\n\nThe set_data() call for {resource} has no ' - 'provenance information and should not be an ' - 'injection.') + raise IndexError( + f"\n\nThe set_data() call for {resource} has no " + "provenance information and should not be an " + "injection." 
+ ) if not json_info: - json_info = {'RawSources': [resource]} # <---- this will be repopulated to the full file path at the end of the pipeline building, in gather_pipes() - json_info['CpacProvenance'] = new_prov_list + json_info = { + "RawSources": [resource] + } # <---- this will be repopulated to the full file path at the end of the pipeline building, in gather_pipes() + json_info["CpacProvenance"] = new_prov_list if resource not in self.rpool.keys(): self.rpool[resource] = {} - else: - if not fork: # <--- in the event of multiple strategies/options, this will run for every option; just keep in mind - search = False - if self.get_resource_from_prov(current_prov_list) == resource: - pipe_idx = self.generate_prov_string(current_prov_list)[1] # CHANGING PIPE_IDX, BE CAREFUL DOWNSTREAM IN THIS FUNCTION - if pipe_idx not in self.rpool[resource].keys(): - search = True - else: + elif not fork: # <--- in the event of multiple strategies/options, this will run for every option; just keep in mind + search = False + if self.get_resource_from_prov(current_prov_list) == resource: + pipe_idx = self.generate_prov_string(current_prov_list)[ + 1 + ] # CHANGING PIPE_IDX, BE CAREFUL DOWNSTREAM IN THIS FUNCTION + if pipe_idx not in self.rpool[resource].keys(): search = True - if search: - for idx in current_prov_list: - if self.get_resource_from_prov(idx) == resource: - if isinstance(idx, list): - pipe_idx = self.generate_prov_string(idx)[1] # CHANGING PIPE_IDX, BE CAREFUL DOWNSTREAM IN THIS FUNCTION - elif isinstance(idx, str): - pipe_idx = idx - break - if pipe_idx in self.rpool[resource].keys(): # <--- in case the resource name is now new, and not the original - del self.rpool[resource][pipe_idx] # <--- remove old keys so we don't end up with a new strat for every new node unit (unless we fork) + else: + search = True + if search: + for idx in current_prov_list: + if self.get_resource_from_prov(idx) == resource: + if isinstance(idx, list): + pipe_idx = self.generate_prov_string( + idx + )[ + 1 + ] # CHANGING PIPE_IDX, BE CAREFUL DOWNSTREAM IN THIS FUNCTION + elif isinstance(idx, str): + pipe_idx = idx + break + if ( + pipe_idx in self.rpool[resource].keys() + ): # <--- in case the resource name is now new, and not the original + del self.rpool[ + resource + ][ + pipe_idx + ] # <--- remove old keys so we don't end up with a new strat for every new node unit (unless we fork) if new_pipe_idx not in self.rpool[resource]: self.rpool[resource][new_pipe_idx] = {} if new_pipe_idx not in self.pipe_list: self.pipe_list.append(new_pipe_idx) - self.rpool[resource][new_pipe_idx]['data'] = (node, output) - self.rpool[resource][new_pipe_idx]['json'] = json_info + self.rpool[resource][new_pipe_idx]["data"] = (node, output) + self.rpool[resource][new_pipe_idx]["json"] = json_info - def get(self, resource: LIST_OR_STR, pipe_idx: Optional[str] = None, - report_fetched: Optional[bool] = False, - optional: Optional[bool] = False) -> Union[ - TUPLE[Optional[dict], Optional[str]], Optional[dict]]: + def get( + self, + resource: LIST_OR_STR, + pipe_idx: Optional[str] = None, + report_fetched: Optional[bool] = False, + optional: Optional[bool] = False, + ) -> Union[TUPLE[Optional[dict], Optional[str]], Optional[dict]]: # NOTE!!! # if this is the main rpool, this will return a dictionary of strats, and inside those, are dictionaries like {'data': (node, out), 'json': info} # BUT, if this is a sub rpool (i.e. 
a strat_pool), this will return a one-level dictionary of {'data': (node, out), 'json': info} WITHOUT THE LEVEL OF STRAT KEYS ABOVE IT @@ -373,24 +430,26 @@ def get(self, resource: LIST_OR_STR, pipe_idx: Optional[str] = None, "your C-PAC output directory.\n- If you have done these, " "and you still get this message, please let us know " "through any of our support channels at: " - "https://fcp-indi.github.io/\n") + "https://fcp-indi.github.io/\n" + ) - def get_data(self, resource, pipe_idx=None, report_fetched=False, - quick_single=False): + def get_data( + self, resource, pipe_idx=None, report_fetched=False, quick_single=False + ): if report_fetched: if pipe_idx: - connect, fetched = self.get(resource, pipe_idx=pipe_idx, - report_fetched=report_fetched) - return (connect['data'], fetched) - connect, fetched =self.get(resource, - report_fetched=report_fetched) - return (connect['data'], fetched) + connect, fetched = self.get( + resource, pipe_idx=pipe_idx, report_fetched=report_fetched + ) + return (connect["data"], fetched) + connect, fetched = self.get(resource, report_fetched=report_fetched) + return (connect["data"], fetched) elif pipe_idx: - return self.get(resource, pipe_idx=pipe_idx)['data'] + return self.get(resource, pipe_idx=pipe_idx)["data"] elif quick_single or len(self.get(resource)) == 1: for key, val in self.get(resource).items(): - return val['data'] - return self.get(resource)['data'] + return val["data"] + return self.get(resource)["data"] def copy_resource(self, resource, new_name): try: @@ -416,12 +475,14 @@ def get_json(self, resource, strat=None): # TODO: the below hits the exception if you use get_cpac_provenance on # TODO: the main rpool (i.e. if strat=None) - if 'json' in resource_strat_dct: - strat_json = resource_strat_dct['json'] + if "json" in resource_strat_dct: + strat_json = resource_strat_dct["json"] else: - raise Exception('\n[!] Developer info: the JSON ' - f'information for {resource} and {strat} ' - f'is incomplete.\n') + raise Exception( + "\n[!] Developer info: the JSON " + f"information for {resource} and {strat} " + f"is incomplete.\n" + ) return strat_json def get_cpac_provenance(self, resource, strat=None): @@ -434,7 +495,7 @@ def get_cpac_provenance(self, resource, strat=None): except KeyError: continue json_data = self.get_json(resource, strat) - return json_data['CpacProvenance'] + return json_data["CpacProvenance"] @staticmethod def generate_prov_string(prov): @@ -442,17 +503,21 @@ def generate_prov_string(prov): # MULTIPLE PRECEDING RESOURCES (or single, if just one) # NOTE: this DOES NOT merge multiple resources!!! (i.e. for merging-strat pipe_idx generation) if not isinstance(prov, list): - raise Exception('\n[!] Developer info: the CpacProvenance ' - f'entry for {prov} has to be a list.\n') + raise Exception( + "\n[!] Developer info: the CpacProvenance " + f"entry for {prov} has to be a list.\n" + ) last_entry = get_last_prov_entry(prov) - resource = last_entry.split(':')[0] + resource = last_entry.split(":")[0] return (resource, str(prov)) @staticmethod def generate_prov_list(prov_str): if not isinstance(prov_str, str): - raise Exception('\n[!] Developer info: the CpacProvenance ' - f'entry for {str(prov_str)} has to be a string.\n') + raise Exception( + "\n[!] 
Developer info: the CpacProvenance " + f"entry for {prov_str!s} has to be a string.\n" + ) return ast.literal_eval(prov_str) @staticmethod @@ -464,15 +529,15 @@ def get_resource_strats_from_prov(prov): # {rpool entry}: {that entry's provenance} resource_strat_dct = {} if isinstance(prov, str): - resource = prov.split(':')[0] + resource = prov.split(":")[0] resource_strat_dct[resource] = prov else: for spot, entry in enumerate(prov): if isinstance(entry, list): - resource = entry[-1].split(':')[0] + resource = entry[-1].split(":")[0] resource_strat_dct[resource] = entry elif isinstance(entry, str): - resource = entry.split(':')[0] + resource = entry.split(":")[0] resource_strat_dct[resource] = entry return resource_strat_dct @@ -489,7 +554,6 @@ def flatten_prov(self, prov): return flat_prov def get_strats(self, resources, debug=False): - # TODO: NOTE: NOT COMPATIBLE WITH SUB-RPOOL/STRAT_POOLS # TODO: (and it doesn't have to be) @@ -498,16 +562,16 @@ def get_strats(self, resources, debug=False): linked_resources = [] resource_list = [] if debug: - verbose_logger = getLogger('engine') - verbose_logger.debug('\nresources: %s', resources) + verbose_logger = getLogger("engine") + verbose_logger.debug("\nresources: %s", resources) for resource in resources: # grab the linked-input tuples if isinstance(resource, tuple): linked = [] for label in list(resource): - rp_dct, fetched_resource = self.get(label, - report_fetched=True, - optional=True) + rp_dct, fetched_resource = self.get( + label, report_fetched=True, optional=True + ) if not rp_dct: continue linked.append(fetched_resource) @@ -522,43 +586,45 @@ def get_strats(self, resources, debug=False): variant_pool = {} len_inputs = len(resource_list) if debug: - verbose_logger = getLogger('engine') - verbose_logger.debug('linked_resources: %s', - linked_resources) - verbose_logger.debug('resource_list: %s', resource_list) + verbose_logger = getLogger("engine") + verbose_logger.debug("linked_resources: %s", linked_resources) + verbose_logger.debug("resource_list: %s", resource_list) for resource in resource_list: - rp_dct, fetched_resource = self.get(resource, - report_fetched=True, # <---- rp_dct has the strats/pipe_idxs as the keys on first level, then 'data' and 'json' on each strat level underneath - optional=True) # oh, and we make the resource fetching in get_strats optional so we can have optional inputs, but they won't be optional in the node block unless we want them to be + rp_dct, fetched_resource = self.get( + resource, + report_fetched=True, # <---- rp_dct has the strats/pipe_idxs as the keys on first level, then 'data' and 'json' on each strat level underneath + optional=True, + ) # oh, and we make the resource fetching in get_strats optional so we can have optional inputs, but they won't be optional in the node block unless we want them to be if not rp_dct: len_inputs -= 1 continue sub_pool = [] if debug: - verbose_logger.debug('len(rp_dct): %s\n', len(rp_dct)) + verbose_logger.debug("len(rp_dct): %s\n", len(rp_dct)) for strat in rp_dct.keys(): json_info = self.get_json(fetched_resource, strat) - cpac_prov = json_info['CpacProvenance'] + cpac_prov = json_info["CpacProvenance"] sub_pool.append(cpac_prov) if fetched_resource not in variant_pool: variant_pool[fetched_resource] = [] - if 'CpacVariant' in json_info: - for key, val in json_info['CpacVariant'].items(): + if "CpacVariant" in json_info: + for key, val in json_info["CpacVariant"].items(): if val not in variant_pool[fetched_resource]: variant_pool[fetched_resource] += val - 
variant_pool[fetched_resource].append(
-                                f'NO-{val[0]}')
+                            variant_pool[fetched_resource].append(f"NO-{val[0]}")
                 if debug:
-                    verbose_logger = getLogger('engine')
-                    verbose_logger.debug('%s sub_pool: %s\n', resource, sub_pool)
+                    verbose_logger = getLogger("engine")
+                    verbose_logger.debug("%s sub_pool: %s\n", resource, sub_pool)
                 total_pool.append(sub_pool)
 
         if not total_pool:
-            raise LookupError('\n\n[!] C-PAC says: None of the listed '
-                              'resources in the node block being connected '
-                              'exist in the resource pool.\n\nResources:\n'
-                              '%s\n\n' % resource_list)
+            raise LookupError(
+                "\n\n[!] C-PAC says: None of the listed "
+                "resources in the node block being connected "
+                "exist in the resource pool.\n\nResources:\n"
+                "%s\n\n" % resource_list
+            )
 
         # TODO: right now total_pool is:
         # TODO: [[[T1w:anat_ingress, desc-preproc_T1w:anatomical_init, desc-preproc_T1w:acpc_alignment], [T1w:anat_ingress,desc-preproc_T1w:anatomical_init]],
@@ -576,7 +642,7 @@ def get_strats(self, resources, debug=False):
             new_strats = {}
 
             # get rid of duplicates - TODO: refactor .product
-            strat_str_list = []
+            strat_str_list = []
             strat_list_list = []
             for strat_tuple in strats:
                 strat_list = list(copy.deepcopy(strat_tuple))
@@ -586,18 +652,14 @@ def get_strats(self, resources, debug=False):
                 strat_list_list.append(strat_list)
 
             if debug:
-                verbose_logger = getLogger('engine')
-                verbose_logger.debug('len(strat_list_list): %s\n',
-                                     len(strat_list_list))
+                verbose_logger = getLogger("engine")
+                verbose_logger.debug("len(strat_list_list): %s\n", len(strat_list_list))
             for strat_list in strat_list_list:
-
                 json_dct = {}
                 for strat in strat_list:
                     # strat is a prov list for a single resource/input
-                    strat_resource, strat_idx = \
-                        self.generate_prov_string(strat)
-                    strat_json = self.get_json(strat_resource,
-                                               strat=strat_idx)
+                    strat_resource, strat_idx = self.generate_prov_string(strat)
+                    strat_json = self.get_json(strat_resource, strat=strat_idx)
                     json_dct[strat_resource] = strat_json
 
                 drop = False
@@ -613,38 +675,38 @@ def get_strats(self, resources, debug=False):
                     if xlabel == ylabel:
                         continue
                     yjson = copy.deepcopy(json_dct[ylabel])
-
-                    if 'CpacVariant' not in xjson:
-                        xjson['CpacVariant'] = {}
-                    if 'CpacVariant' not in yjson:
-                        yjson['CpacVariant'] = {}
-
+
+                    if "CpacVariant" not in xjson:
+                        xjson["CpacVariant"] = {}
+                    if "CpacVariant" not in yjson:
+                        yjson["CpacVariant"] = {}
+
                     current_strat = []
-                    for key, val in xjson['CpacVariant'].items():
+                    for key, val in xjson["CpacVariant"].items():
                         if isinstance(val, list):
                             current_strat.append(val[0])
                         else:
                             current_strat.append(val)
                     current_spread = list(set(variant_pool[xlabel]))
                     for spread_label in current_spread:
-                        if 'NO-' in spread_label:
+                        if "NO-" in spread_label:
                             continue
                         if spread_label not in current_strat:
-                            current_strat.append(f'NO-{spread_label}')
-
+                            current_strat.append(f"NO-{spread_label}")
+
                     other_strat = []
-                    for key, val in yjson['CpacVariant'].items():
+                    for key, val in yjson["CpacVariant"].items():
                         if isinstance(val, list):
                             other_strat.append(val[0])
                         else:
                             other_strat.append(val)
                     other_spread = list(set(variant_pool[ylabel]))
                     for spread_label in other_spread:
-                        if 'NO-' in spread_label:
+                        if "NO-" in spread_label:
                             continue
                         if spread_label not in other_strat:
-                            other_strat.append(f'NO-{spread_label}')
-
+                            other_strat.append(f"NO-{spread_label}")
+
                     for variant in current_spread:
                         in_current_strat = False
                         in_other_strat = False
@@ -671,7 +733,7 @@ def get_strats(self, resources, debug=False):
                             if in_other_spread:
                                 if not in_current_strat:
                                     drop = True
-                                    break
+                                    break
                     if drop:
                         break
                 if drop:
@@ -680,62 +742,84 @@ def get_strats(self, resources, debug=False):
                 # make the merged strat label from the multiple inputs
                 # strat_list is actually the merged CpacProvenance lists
                 pipe_idx = str(strat_list)
-                new_strats[pipe_idx] = ResourcePool()  # <----- new_strats is A DICTIONARY OF RESOURCEPOOL OBJECTS!
-
+                new_strats[pipe_idx] = (
+                    ResourcePool()
+                )  # <----- new_strats is A DICTIONARY OF RESOURCEPOOL OBJECTS!
+
                 # placing JSON info at one level higher only for copy convenience
-                new_strats[pipe_idx].rpool['json'] = {}
-                new_strats[pipe_idx].rpool['json']['subjson'] = {}
-                new_strats[pipe_idx].rpool['json']['CpacProvenance'] = strat_list
+                new_strats[pipe_idx].rpool["json"] = {}
+                new_strats[pipe_idx].rpool["json"]["subjson"] = {}
+                new_strats[pipe_idx].rpool["json"]["CpacProvenance"] = strat_list
 
                 # now just invert resource:strat to strat:resource for each resource:strat
                 for cpac_prov in strat_list:
                     resource, strat = self.generate_prov_string(cpac_prov)
-                    resource_strat_dct = self.rpool[resource][strat]  # <----- remember, this is the dct of 'data' and 'json'.
-                    new_strats[pipe_idx].rpool[resource] = resource_strat_dct  # <----- new_strats is A DICTIONARY OF RESOURCEPOOL OBJECTS! each one is a new slice of the resource pool combined together.
+                    resource_strat_dct = self.rpool[resource][
+                        strat
+                    ]  # <----- remember, this is the dct of 'data' and 'json'.
+                    new_strats[pipe_idx].rpool[resource] = (
+                        resource_strat_dct  # <----- new_strats is A DICTIONARY OF RESOURCEPOOL OBJECTS! each one is a new slice of the resource pool combined together.
+                    )
                     self.pipe_list.append(pipe_idx)
-                    if 'CpacVariant' in resource_strat_dct['json']:
-                        if 'CpacVariant' not in new_strats[pipe_idx].rpool['json']:
-                            new_strats[pipe_idx].rpool['json']['CpacVariant'] = {}
-                        for younger_resource, variant_list in resource_strat_dct['json']['CpacVariant'].items():
-                            if younger_resource not in new_strats[pipe_idx].rpool['json']['CpacVariant']:
-                                new_strats[pipe_idx].rpool['json']['CpacVariant'][younger_resource] = variant_list
+                    if "CpacVariant" in resource_strat_dct["json"]:
+                        if "CpacVariant" not in new_strats[pipe_idx].rpool["json"]:
+                            new_strats[pipe_idx].rpool["json"]["CpacVariant"] = {}
+                        for younger_resource, variant_list in resource_strat_dct[
+                            "json"
+                        ]["CpacVariant"].items():
+                            if (
+                                younger_resource
+                                not in new_strats[pipe_idx].rpool["json"]["CpacVariant"]
+                            ):
+                                new_strats[pipe_idx].rpool["json"]["CpacVariant"][
+                                    younger_resource
+                                ] = variant_list
                     # preserve each input's JSON info also
-                    data_type = resource.split('_')[-1]
-                    if data_type not in new_strats[pipe_idx].rpool['json']['subjson']:
-                        new_strats[pipe_idx].rpool['json']['subjson'][data_type] = {}
-                    new_strats[pipe_idx].rpool['json']['subjson'][data_type].update(copy.deepcopy(resource_strat_dct['json']))
+                    data_type = resource.split("_")[-1]
+                    if data_type not in new_strats[pipe_idx].rpool["json"]["subjson"]:
+                        new_strats[pipe_idx].rpool["json"]["subjson"][data_type] = {}
+                    new_strats[pipe_idx].rpool["json"]["subjson"][data_type].update(
+                        copy.deepcopy(resource_strat_dct["json"])
+                    )
         else:
             new_strats = {}
-            for resource_strat_list in total_pool:  # total_pool will have only one list of strats, for the one input
-                for cpac_prov in resource_strat_list:  # <------- cpac_prov here doesn't need to be modified, because it's not merging with other inputs
+            for resource_strat_list in (
+                total_pool
+            ):  # total_pool will have only one list of strats, for the one input
+                for cpac_prov in resource_strat_list:  # <------- cpac_prov here doesn't need to be modified, because it's not merging with other inputs
                     resource, pipe_idx = self.generate_prov_string(cpac_prov)
-                    resource_strat_dct = self.rpool[resource][pipe_idx]  # <----- remember, this is the dct of 'data' and 'json'.
-                    new_strats[pipe_idx] = ResourcePool(rpool={resource: resource_strat_dct})  # <----- again, new_strats is A DICTIONARY OF RESOURCEPOOL OBJECTS!
+                    resource_strat_dct = self.rpool[resource][
+                        pipe_idx
+                    ]  # <----- remember, this is the dct of 'data' and 'json'.
+                    new_strats[pipe_idx] = ResourcePool(
+                        rpool={resource: resource_strat_dct}
+                    )  # <----- again, new_strats is A DICTIONARY OF RESOURCEPOOL OBJECTS!
                     # placing JSON info at one level higher only for copy convenience
-                    new_strats[pipe_idx].rpool['json'] = resource_strat_dct['json']  # TODO: WARNING- THIS IS A LEVEL HIGHER THAN THE ORIGINAL 'JSON' FOR EASE OF ACCESS IN CONNECT_BLOCK WITH THE .GET(JSON)
-                    new_strats[pipe_idx].rpool['json']['subjson'] = {}
-                    new_strats[pipe_idx].rpool['json']['CpacProvenance'] = cpac_prov
+                    new_strats[pipe_idx].rpool["json"] = resource_strat_dct[
+                        "json"
+                    ]  # TODO: WARNING- THIS IS A LEVEL HIGHER THAN THE ORIGINAL 'JSON' FOR EASE OF ACCESS IN CONNECT_BLOCK WITH THE .GET(JSON)
+                    new_strats[pipe_idx].rpool["json"]["subjson"] = {}
+                    new_strats[pipe_idx].rpool["json"]["CpacProvenance"] = cpac_prov
                     # preserve each input's JSON info also
-                    data_type = resource.split('_')[-1]
-                    if data_type not in new_strats[pipe_idx].rpool['json']['subjson']:
-                        new_strats[pipe_idx].rpool['json']['subjson'][data_type] = {}
-                    new_strats[pipe_idx].rpool['json']['subjson'][data_type].update(copy.deepcopy(resource_strat_dct['json']))
+                    data_type = resource.split("_")[-1]
+                    if data_type not in new_strats[pipe_idx].rpool["json"]["subjson"]:
+                        new_strats[pipe_idx].rpool["json"]["subjson"][data_type] = {}
+                    new_strats[pipe_idx].rpool["json"]["subjson"][data_type].update(
+                        copy.deepcopy(resource_strat_dct["json"])
+                    )
         return new_strats
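The variant-compatibility check above is easier to see in isolation. Below is a minimal, hypothetical sketch of the rule get_strats applies: each resource's strategy carries the fork options it took, the variant pool carries every option that exists, and options a strategy did not take are marked "NO-<option>", so two strategies only merge when neither asserts an option the other denies. All names here are illustrative, not C-PAC API.

# hypothetical, self-contained illustration of the NO- marker check
def compatible(strat_a: list, strat_b: list) -> bool:
    # a merge is dropped when one strategy took a fork ("opt")
    # that the other explicitly did not take ("NO-opt")
    def denies(taken, other):
        return any(f"NO-{opt}" in other
                   for opt in taken if not opt.startswith("NO-"))
    return not denies(strat_a, strat_b) and not denies(strat_b, strat_a)

assert compatible(["bet"], ["bet"])         # same fork taken: keep the merge
assert not compatible(["bet"], ["NO-bet"])  # one took it, one denies it: drop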
 
-    def derivative_xfm(self, wf, label, connection, json_info, pipe_idx,
-                       pipe_x):
-
+    def derivative_xfm(self, wf, label, connection, json_info, pipe_idx, pipe_x):
         if label in self.xfm:
-
             json_info = dict(json_info)
 
             # get the bold-to-template transform from the current strat_pool
             # info
             xfm_idx = None
-            xfm_label = 'from-bold_to-template_mode-image_xfm'
-            for entry in json_info['CpacProvenance']:
+            xfm_label = "from-bold_to-template_mode-image_xfm"
+            for entry in json_info["CpacProvenance"]:
                 if isinstance(entry, list):
-                    if entry[-1].split(':')[0] == xfm_label:
+                    if entry[-1].split(":")[0] == xfm_label:
                         xfm_prov = entry
                         xfm_idx = self.generate_prov_string(xfm_prov)[1]
                         break
@@ -746,40 +830,50 @@ def derivative_xfm(self, wf, label, connection, json_info, pipe_idx,
             if not xfm_idx:
                 xfm_info = []
                 for pipe_idx, entry in self.get(xfm_label).items():
-                    xfm_info.append((pipe_idx, entry['json']['CpacProvenance']))
+                    xfm_info.append((pipe_idx, entry["json"]["CpacProvenance"]))
             else:
                 xfm_info = [(xfm_idx, xfm_prov)]
 
             for num, xfm_entry in enumerate(xfm_info):
-
                 xfm_idx, xfm_prov = xfm_entry
                 reg_tool = check_prov_for_regtool(xfm_prov)
 
-                xfm = transform_derivative(f'{label}_xfm_{pipe_x}_{num}',
-                                           label, reg_tool, self.num_cpus,
-                                           self.num_ants_cores,
-                                           ants_interp=self.ants_interp,
-                                           fsl_interp=self.fsl_interp,
-                                           opt=None)
-                wf.connect(connection[0], connection[1],
-                           xfm, 'inputspec.in_file')
-
-                node, out = self.get_data("T1w-brain-template-deriv",
-                                          quick_single=True)
-                wf.connect(node, out, xfm, 'inputspec.reference')
-
-                node, out = self.get_data('from-bold_to-template_mode-image_xfm',
-                                          pipe_idx=xfm_idx)
-                wf.connect(node, out, xfm, 'inputspec.transform')
-
-                label = f'space-template_{label}'
-                json_info['Template'] = self.get_json_info('T1w-brain-template-deriv',
-                                                           None, 'Description')
-                new_prov = json_info['CpacProvenance'] + xfm_prov
-                json_info['CpacProvenance'] = new_prov
+                xfm = transform_derivative(
+                    f"{label}_xfm_{pipe_x}_{num}",
+                    label,
+                    reg_tool,
+                    self.num_cpus,
+                    self.num_ants_cores,
+                    ants_interp=self.ants_interp,
+                    fsl_interp=self.fsl_interp,
+                    opt=None,
+                )
+                wf.connect(connection[0], connection[1], xfm, "inputspec.in_file")
+
+                node, out = self.get_data("T1w-brain-template-deriv", quick_single=True)
+                wf.connect(node, out, xfm, "inputspec.reference")
+
+                node, out = self.get_data(
+                    "from-bold_to-template_mode-image_xfm", pipe_idx=xfm_idx
+                )
+                wf.connect(node, out, xfm, "inputspec.transform")
+
+                label = f"space-template_{label}"
+                json_info["Template"] = self.get_json_info(
+                    "T1w-brain-template-deriv", None, "Description"
+                )
+                new_prov = json_info["CpacProvenance"] + xfm_prov
+                json_info["CpacProvenance"] = new_prov
                 new_pipe_idx = self.generate_prov_string(new_prov)
-                self.set_data(label, xfm, 'outputspec.out_file', json_info,
-                              new_pipe_idx, f'{label}_xfm_{num}', fork=True)
+                self.set_data(
+                    label,
+                    xfm,
+                    "outputspec.out_file",
+                    json_info,
+                    new_pipe_idx,
+                    f"{label}_xfm_{num}",
+                    fork=True,
+                )
 
         return wf
 
@@ -793,8 +887,9 @@ def filtered_movement(self) -> bool:
         bool
         """
         try:
-            return 'motion_estimate_filter' in str(self.get_cpac_provenance(
-                'desc-movementParameters_motion'))
+            return "motion_estimate_filter" in str(
+                self.get_cpac_provenance("desc-movementParameters_motion")
+            )
         except KeyError:
             # not a strat_pool or no movement parameters in strat_pool
             return False
@@ -808,49 +903,53 @@ def filter_name(self, cfg) -> str:
         -------
         str
         """
-        motion_filters = cfg['functional_preproc',
-                             'motion_estimates_and_correction',
-                             'motion_estimate_filter', 'filters']
-        if len(motion_filters) == 1 and cfg.switch_is_on([
-            'functional_preproc', 'motion_estimates_and_correction',
-            'motion_estimate_filter', 'run'], exclusive=True
+        motion_filters = cfg[
+            "functional_preproc",
+            "motion_estimates_and_correction",
+            "motion_estimate_filter",
+            "filters",
+        ]
+        if len(motion_filters) == 1 and cfg.switch_is_on(
+            [
+                "functional_preproc",
+                "motion_estimates_and_correction",
+                "motion_estimate_filter",
+                "run",
+            ],
+            exclusive=True,
         ):
-            return motion_filters[0]['Name']
+            return motion_filters[0]["Name"]
         try:
-            key = 'motion'
-            sidecar = self.get_json('desc-movementParameters_motion')
+            key = "motion"
+            sidecar = self.get_json("desc-movementParameters_motion")
        except KeyError:
            sidecar = None
-        if sidecar is not None and 'CpacVariant' in sidecar:
-            if sidecar['CpacVariant'][key]:
-                return sidecar['CpacVariant'][key][0][::-1].split('_',
-                                                                  1)[0][::-1]
-        return 'none'
+        if sidecar is not None and "CpacVariant" in sidecar:
+            if sidecar["CpacVariant"][key]:
+                return sidecar["CpacVariant"][key][0][::-1].split("_", 1)[0][::-1]
+        return "none"
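The reversed split in filter_name above is terse; a hedged sketch of the same idiom follows, with an illustrative value rather than real sidecar contents. It simply takes everything after the last underscore.

variant = "filtered_notch4Hz"  # hypothetical CpacVariant entry
suffix = variant[::-1].split("_", 1)[0][::-1]
assert suffix == "notch4Hz"    # i.e. everything after the last "_"
# an equivalent, arguably clearer spelling:
assert variant.rsplit("_", 1)[-1] == suffix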
 
-    def post_process(self, wf, label, connection, json_info, pipe_idx, pipe_x,
-                     outs):
-
-        input_type = 'func_derivative'
+    def post_process(self, wf, label, connection, json_info, pipe_idx, pipe_x, outs):
+        input_type = "func_derivative"
 
         post_labels = [(label, connection[0], connection[1])]
 
-        if re.match(r'(.*_)?[ed]c[bw]$', label) or re.match(r'(.*_)?lfcd[bw]$',
-                                                            label):
+        if re.match(r"(.*_)?[ed]c[bw]$", label) or re.match(r"(.*_)?lfcd[bw]$", label):
            # suffix: [eigenvector or degree] centrality [binarized or weighted]
            # or lfcd [binarized or weighted]
-            mask = 'template-specification-file'
-        elif 'space-template' in label:
-            if 'space-template_res-derivative_desc-bold_mask' in self.rpool.keys():
-                mask = 'space-template_res-derivative_desc-bold_mask'
+            mask = "template-specification-file"
+        elif "space-template" in label:
+            if "space-template_res-derivative_desc-bold_mask" in self.rpool.keys():
+                mask = "space-template_res-derivative_desc-bold_mask"
             else:
-                mask = 'space-template_desc-bold_mask'
+                mask = "space-template_desc-bold_mask"
         else:
-            mask = 'space-bold_desc-brain_mask'
+            mask = "space-bold_desc-brain_mask"
 
         mask_idx = None
-        for entry in json_info['CpacProvenance']:
+        for entry in json_info["CpacProvenance"]:
             if isinstance(entry, list):
-                if entry[-1].split(':')[0] == mask:
+                if entry[-1].split(":")[0] == mask:
                     mask_prov = entry
                     mask_idx = self.generate_prov_string(mask_prov)[1]
                     break
@@ -858,96 +957,119 @@ def post_process(self, wf, label, connection, json_info, pipe_idx, pipe_x,
         if self.smoothing_bool:
             if label in Outputs.to_smooth:
                 for smooth_opt in self.smooth_opts:
-
-                    sm = spatial_smoothing(f'{label}_smooth_{smooth_opt}_'
-                                           f'{pipe_x}',
-                                           self.fwhm, input_type, smooth_opt)
-                    wf.connect(connection[0], connection[1],
-                               sm, 'inputspec.in_file')
-                    node, out = self.get_data(mask, pipe_idx=mask_idx,
-                                              quick_single=mask_idx is None)
-                    wf.connect(node, out, sm, 'inputspec.mask')
-
-                    if 'desc-' not in label:
-                        if 'space-' in label:
-                            for tag in label.split('_'):
-                                if 'space-' in tag:
-                                    smlabel = label.replace(tag,
-                                                            f'{tag}_desc-sm')
+                    sm = spatial_smoothing(
+                        f"{label}_smooth_{smooth_opt}_" f"{pipe_x}",
+                        self.fwhm,
+                        input_type,
+                        smooth_opt,
+                    )
+                    wf.connect(connection[0], connection[1], sm, "inputspec.in_file")
+                    node, out = self.get_data(
+                        mask, pipe_idx=mask_idx, quick_single=mask_idx is None
+                    )
+                    wf.connect(node, out, sm, "inputspec.mask")
+
+                    if "desc-" not in label:
+                        if "space-" in label:
+                            for tag in label.split("_"):
+                                if "space-" in tag:
+                                    smlabel = label.replace(tag, f"{tag}_desc-sm")
                                     break
                         else:
-                            smlabel = f'desc-sm_{label}'
+                            smlabel = f"desc-sm_{label}"
                     else:
-                        for tag in label.split('_'):
-                            if 'desc-' in tag:
-                                newtag = f'{tag}-sm'
+                        for tag in label.split("_"):
+                            if "desc-" in tag:
+                                newtag = f"{tag}-sm"
                                 smlabel = label.replace(tag, newtag)
                                 break
 
-                    post_labels.append((smlabel, sm, 'outputspec.out_file'))
-
-                    self.set_data(smlabel, sm, 'outputspec.out_file',
-                                  json_info, pipe_idx,
-                                  f'spatial_smoothing_{smooth_opt}',
-                                  fork=True)
-                    self.set_data('fwhm', sm, 'outputspec.fwhm', json_info,
-                                  pipe_idx, f'spatial_smoothing_{smooth_opt}',
-                                  fork=True)
-
-        if self.zscoring_bool:
+                    post_labels.append((smlabel, sm, "outputspec.out_file"))
+
+                    self.set_data(
+                        smlabel,
+                        sm,
+                        "outputspec.out_file",
+                        json_info,
+                        pipe_idx,
+                        f"spatial_smoothing_{smooth_opt}",
+                        fork=True,
+                    )
+                    self.set_data(
+                        "fwhm",
+                        sm,
+                        "outputspec.fwhm",
+                        json_info,
+                        pipe_idx,
+                        f"spatial_smoothing_{smooth_opt}",
+                        fork=True,
+                    )
+
+        if self.zscoring_bool:
             for label_con_tpl in post_labels:
                 label = label_con_tpl[0]
                 connection = (label_con_tpl[1], label_con_tpl[2])
                 if label in Outputs.to_zstd:
-                    zstd = z_score_standardize(f'{label}_zstd_{pipe_x}',
-                                               input_type)
+                    zstd = z_score_standardize(f"{label}_zstd_{pipe_x}", input_type)
 
-                    wf.connect(connection[0], connection[1],
-                               zstd, 'inputspec.in_file')
+                    wf.connect(connection[0], connection[1], zstd, "inputspec.in_file")
 
                     node, out = self.get_data(mask, pipe_idx=mask_idx)
-                    wf.connect(node, out, zstd, 'inputspec.mask')
+                    wf.connect(node, out, zstd, "inputspec.mask")
 
-                    if 'desc-' not in label:
-                        if 'space-template' in label:
-                            new_label = label.replace('space-template',
-                                                      'space-template_desc-zstd')
+                    if "desc-" not in label:
+                        if "space-template" in label:
+                            new_label = label.replace(
+                                "space-template", "space-template_desc-zstd"
+                            )
                         else:
-                            new_label = f'desc-zstd_{label}'
+                            new_label = f"desc-zstd_{label}"
                     else:
-                        for tag in label.split('_'):
-                            if 'desc-' in tag:
-                                newtag = f'{tag}-zstd'
+                        for tag in label.split("_"):
+                            if "desc-" in tag:
+                                newtag = f"{tag}-zstd"
                                 new_label = label.replace(tag, newtag)
                                 break
 
-                    post_labels.append((new_label, zstd, 'outputspec.out_file'))
+                    post_labels.append((new_label, zstd, "outputspec.out_file"))
 
-                    self.set_data(new_label, zstd, 'outputspec.out_file',
-                                  json_info, pipe_idx, f'zscore_standardize',
-                                  fork=True)
+                    self.set_data(
+                        new_label,
+                        zstd,
+                        "outputspec.out_file",
+                        json_info,
+                        pipe_idx,
+                        "zscore_standardize",
+                        fork=True,
+                    )
 
                 elif label in Outputs.to_fisherz:
+                    zstd = fisher_z_score_standardize(
+                        f"{label}_zstd_{pipe_x}", label, input_type
+                    )
 
-                    zstd = fisher_z_score_standardize(f'{label}_zstd_{pipe_x}',
-                                                      label, input_type)
-
-                    wf.connect(connection[0], connection[1],
-                               zstd, 'inputspec.correlation_file')
+                    wf.connect(
+                        connection[0], connection[1], zstd, "inputspec.correlation_file"
+                    )
 
                     # if the output is 'space-template_desc-MeanSCA_correlations', we want
                     # 'desc-MeanSCA_timeseries'
-                    oned = label.replace('correlations', 'timeseries')
+                    oned = label.replace("correlations", "timeseries")
 
                     node, out = outs[oned]
-                    wf.connect(node, out, zstd, 'inputspec.timeseries_oned')
+                    wf.connect(node, out, zstd, "inputspec.timeseries_oned")
 
-                    post_labels.append((new_label, zstd, 'outputspec.out_file'))
+                    post_labels.append((new_label, zstd, "outputspec.out_file"))
 
-                    self.set_data(new_label, zstd, 'outputspec.out_file',
-                                  json_info, pipe_idx,
-                                  'fisher_zscore_standardize',
-                                  fork=True)
+                    self.set_data(
+                        new_label,
+                        zstd,
+                        "outputspec.out_file",
+                        json_info,
+                        pipe_idx,
+                        "fisher_zscore_standardize",
+                        fork=True,
+                    )
 
         return (wf, post_labels)
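post_process renames outputs by appending a suffix to the BIDS "desc-" entity. A minimal sketch of that relabeling rule, assuming simple underscore-delimited labels like the ones above; the function name is illustrative, not C-PAC API.

def add_desc_suffix(label: str, suffix: str) -> str:
    # append e.g. "-sm" or "-zstd" to an existing desc- tag,
    # or prepend a fresh desc- tag if the label has none
    parts = label.split("_")
    for i, tag in enumerate(parts):
        if tag.startswith("desc-"):
            parts[i] = f"{tag}{suffix}"
            return "_".join(parts)
    return f"desc{suffix}_{label}"

assert add_desc_suffix("space-template_desc-preproc_bold", "-sm") == \
    "space-template_desc-preproc-sm_bold"
assert add_desc_suffix("space-template_bold", "-zstd") == \
    "desc-zstd_space-template_bold"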
 
@@ -960,16 +1082,15 @@ def gather_pipes(self, wf, cfg, all=False, add_incl=None, add_excl=None):
         if add_excl:
             excl += add_excl
 
-        if 'nonsmoothed' not in cfg.post_processing['spatial_smoothing'][
-                'output']:
+        if "nonsmoothed" not in cfg.post_processing["spatial_smoothing"]["output"]:
             excl += Outputs.native_nonsmooth
             excl += Outputs.template_nonsmooth
 
-        if 'raw' not in cfg.post_processing['z-scoring']['output']:
+        if "raw" not in cfg.post_processing["z-scoring"]["output"]:
             excl += Outputs.native_raw
             excl += Outputs.template_raw
 
-        if not cfg.pipeline_setup['output_directory']['write_debugging_outputs']:
+        if not cfg.pipeline_setup["output_directory"]["write_debugging_outputs"]:
             # substring_excl.append(['bold'])
             excl += Outputs.debugging
 
@@ -998,45 +1119,43 @@ def gather_pipes(self, wf, cfg, all=False, add_incl=None, add_excl=None):
             if drop:
                 continue
 
-            subdir = 'other'
+            subdir = "other"
             if resource in Outputs.anat:
-                subdir = 'anat'
-                #TODO: get acq- etc.
+                subdir = "anat"
+                # TODO: get acq- etc.
             elif resource in Outputs.func:
-                subdir = 'func'
-                #TODO: other stuff like acq- etc.
+                subdir = "func"
+                # TODO: other stuff like acq- etc.
 
             for pipe_idx in self.rpool[resource]:
                 unique_id = self.get_name()
-                part_id = unique_id.split('_')[0]
-                ses_id = unique_id.split('_')[1]
+                part_id = unique_id.split("_")[0]
+                ses_id = unique_id.split("_")[1]
 
-                if 'ses-' not in ses_id:
+                if "ses-" not in ses_id:
                     ses_id = f"ses-{ses_id}"
 
-                out_dir = cfg.pipeline_setup['output_directory']['path']
-                pipe_name = cfg.pipeline_setup['pipeline_name']
-                container = os.path.join(f'pipeline_{pipe_name}', part_id,
-                                         ses_id)
-                filename = f'{unique_id}_{res_in_filename(self.cfg, resource)}'
+                out_dir = cfg.pipeline_setup["output_directory"]["path"]
+                pipe_name = cfg.pipeline_setup["pipeline_name"]
+                container = os.path.join(f"pipeline_{pipe_name}", part_id, ses_id)
+                filename = f"{unique_id}_{res_in_filename(self.cfg, resource)}"
 
                 out_path = os.path.join(out_dir, container, subdir, filename)
 
                 out_dct = {
-                    'unique_id': unique_id,
-                    'out_dir': out_dir,
-                    'container': container,
-                    'subdir': subdir,
-                    'filename': filename,
-                    'out_path': out_path
+                    "unique_id": unique_id,
+                    "out_dir": out_dir,
+                    "container": container,
+                    "subdir": subdir,
+                    "filename": filename,
+                    "out_path": out_path,
                 }
-                self.rpool[resource][pipe_idx]['out'] = out_dct
+                self.rpool[resource][pipe_idx]["out"] = out_dct
 
                 # TODO: have to link the pipe_idx's here. and call up 'desc-preproc_T1w' from a Sources in a json and replace. here.
                 # TODO: can do the pipeline_description.json variants here too!
 
         for resource in self.rpool.keys():
-
             if resource not in Outputs.any:
                 continue
 
@@ -1064,30 +1183,46 @@ def gather_pipes(self, wf, cfg, all=False, add_incl=None, add_excl=None):
             num_variant = 0
             if len(self.rpool[resource]) == 1:
                 num_variant = ""
-            all_jsons = [self.rpool[resource][pipe_idx]['json'] for pipe_idx in
-                         self.rpool[resource]]
-            unlabelled = set(key for json_info in all_jsons for key in
-                             json_info.get('CpacVariant', {}).keys() if
-                             key not in (*MOVEMENT_FILTER_KEYS, 'timeseries'))
-            if 'bold' in unlabelled:
+            all_jsons = [
+                self.rpool[resource][pipe_idx]["json"]
+                for pipe_idx in self.rpool[resource]
+            ]
+            unlabelled = set(
+                key
+                for json_info in all_jsons
+                for key in json_info.get("CpacVariant", {}).keys()
+                if key not in (*MOVEMENT_FILTER_KEYS, "timeseries")
+            )
+            if "bold" in unlabelled:
                 all_bolds = list(
-                    chain.from_iterable(json_info['CpacVariant']['bold'] for
-                                        json_info in all_jsons if
-                                        'CpacVariant' in json_info and
-                                        'bold' in json_info['CpacVariant']))
+                    chain.from_iterable(
+                        json_info["CpacVariant"]["bold"]
+                        for json_info in all_jsons
+                        if "CpacVariant" in json_info
+                        and "bold" in json_info["CpacVariant"]
+                    )
+                )
                 # not any(not) because all is overloaded as a parameter here
-                if not any(not re.match(r'apply_(phasediff|blip)_to_'
-                                        r'timeseries_separately_.*', _bold)
-                           for _bold in all_bolds):
+                if not any(
+                    not re.match(
+                        r"apply_(phasediff|blip)_to_" r"timeseries_separately_.*", _bold
+                    )
+                    for _bold in all_bolds
+                ):
                     # this fork point should only result in 0 or 1 forks
-                    unlabelled.remove('bold')
+                    unlabelled.remove("bold")
                 del all_bolds
-            all_forks = {key: set(
-                chain.from_iterable(json_info['CpacVariant'][key] for
-                                    json_info in all_jsons if
-                                    'CpacVariant' in json_info and
-                                    key in json_info['CpacVariant'])) for
-                key in unlabelled}
+            all_forks = {
+                key: set(
+                    chain.from_iterable(
+                        json_info["CpacVariant"][key]
+                        for json_info in all_jsons
+                        if "CpacVariant" in json_info
+                        and key in json_info["CpacVariant"]
+                    )
+                )
+                for key in unlabelled
+            }
            # del all_jsons
            for key, forks in all_forks.items():
                if len(forks) < 2:  # no int suffix needed if only one fork
@@ -1095,8 +1230,8 @@ def gather_pipes(self, wf, cfg, all=False, add_incl=None, add_excl=None):
             # del all_forks
             for pipe_idx in self.rpool[resource]:
                 pipe_x = self.get_pipe_number(pipe_idx)
-                json_info = self.rpool[resource][pipe_idx]['json']
-                out_dct = self.rpool[resource][pipe_idx]['out']
+                json_info = self.rpool[resource][pipe_idx]["json"]
+                out_dct = self.rpool[resource][pipe_idx]["out"]
 
                 try:
                     if unlabelled:
@@ -1105,157 +1240,173 @@ def gather_pipes(self, wf, cfg, all=False, add_incl=None, add_excl=None):
                     pass
 
                 try:
-                    del json_info['subjson']
+                    del json_info["subjson"]
                 except KeyError:
                     pass
 
-                if out_dct['subdir'] == 'other' and not all:
+                if out_dct["subdir"] == "other" and not all:
                     continue
 
-                unique_id = out_dct['unique_id']
+                unique_id = out_dct["unique_id"]
                 resource_idx = resource
 
                 if isinstance(num_variant, int):
-                    resource_idx, out_dct = name_fork(resource_idx, cfg,
-                                                      json_info, out_dct)
+                    resource_idx, out_dct = name_fork(
+                        resource_idx, cfg, json_info, out_dct
+                    )
                     if unlabelled:
-                        if 'desc-' in out_dct['filename']:
-                            for key in out_dct['filename'].split('_')[::-1]:
+                        if "desc-" in out_dct["filename"]:
+                            for key in out_dct["filename"].split("_")[::-1]:
                                 # final `desc` entity
-                                if key.startswith('desc-'):
-                                    out_dct['filename'] = out_dct['filename'
-                                                                  ].replace(
-                                        key, f'{key}-{num_variant}')
+                                if key.startswith("desc-"):
+                                    out_dct["filename"] = out_dct["filename"].replace(
+                                        key, f"{key}-{num_variant}"
+                                    )
                                     resource_idx = resource_idx.replace(
-                                        key, f'{key}-{num_variant}')
+                                        key, f"{key}-{num_variant}"
+                                    )
                                     break
                         else:
-                            suff = resource.split('_')[-1]
-                            newdesc_suff = f'desc-{num_variant}_{suff}'
-                            resource_idx = resource_idx.replace(suff,
-                                                                newdesc_suff)
-                id_string = pe.Node(Function(input_names=['cfg', 'unique_id',
-                                                          'resource',
-                                                          'scan_id',
-                                                          'template_desc',
-                                                          'atlas_id',
-                                                          'fwhm',
-                                                          'subdir',
-                                                          'extension'],
-                                             output_names=['out_filename'],
-                                             function=create_id_string),
-                                    name=f'id_string_{resource_idx}_{pipe_x}')
+                            suff = resource.split("_")[-1]
+                            newdesc_suff = f"desc-{num_variant}_{suff}"
+                            resource_idx = resource_idx.replace(suff, newdesc_suff)
+                id_string = pe.Node(
+                    Function(
+                        input_names=[
+                            "cfg",
+                            "unique_id",
+                            "resource",
+                            "scan_id",
+                            "template_desc",
+                            "atlas_id",
+                            "fwhm",
+                            "subdir",
+                            "extension",
+                        ],
+                        output_names=["out_filename"],
+                        function=create_id_string,
+                    ),
+                    name=f"id_string_{resource_idx}_{pipe_x}",
+                )
                 id_string.inputs.cfg = self.cfg
                 id_string.inputs.unique_id = unique_id
                 id_string.inputs.resource = resource_idx
-                id_string.inputs.subdir = out_dct['subdir']
+                id_string.inputs.subdir = out_dct["subdir"]
 
                 # grab the iterable scan ID
-                if out_dct['subdir'] == 'func':
-                    node, out = self.rpool['scan']["['scan:func_ingress']"][
-                        'data']
-                    wf.connect(node, out, id_string, 'scan_id')
-
-                self.back_propogate_template_name(wf, resource_idx, json_info,
-                                                  id_string)
+                if out_dct["subdir"] == "func":
+                    node, out = self.rpool["scan"]["['scan:func_ingress']"]["data"]
+                    wf.connect(node, out, id_string, "scan_id")
+
+                self.back_propogate_template_name(
+                    wf, resource_idx, json_info, id_string
+                )
 
                 # grab the FWHM if smoothed
-                for tag in resource.split('_'):
-                    if 'desc-' in tag and '-sm' in tag:
-                        fwhm_idx = pipe_idx.replace(f'{resource}:', 'fwhm:')
+                for tag in resource.split("_"):
+                    if "desc-" in tag and "-sm" in tag:
+                        fwhm_idx = pipe_idx.replace(f"{resource}:", "fwhm:")
                         try:
-                            node, out = self.rpool['fwhm'][fwhm_idx]['data']
-                            wf.connect(node, out, id_string, 'fwhm')
+                            node, out = self.rpool["fwhm"][fwhm_idx]["data"]
+                            wf.connect(node, out, id_string, "fwhm")
                         except KeyError:
                             # smoothing was not done for this resource in the
                             # engine.py smoothing
                             pass
                         break
-                atlas_suffixes = ['timeseries', 'correlations', 'statmap']
+                atlas_suffixes = ["timeseries", "correlations", "statmap"]
                 # grab the iterable atlas ID
                 atlas_id = None
-                if not resource.endswith('desc-confounds_timeseries'):
-                    if resource.split('_')[-1] in atlas_suffixes:
-                        atlas_idx = pipe_idx.replace(resource, 'atlas_name')
+                if not resource.endswith("desc-confounds_timeseries"):
+                    if resource.split("_")[-1] in atlas_suffixes:
+                        atlas_idx = pipe_idx.replace(resource, "atlas_name")
                         # need the single quote and the colon inside the double
                         # quotes - it's the encoded pipe_idx
-                        #atlas_idx = new_idx.replace(f"'{temp_rsc}:",
+                        # atlas_idx = new_idx.replace(f"'{temp_rsc}:",
                         #                            "'atlas_name:")
-                        if atlas_idx in self.rpool['atlas_name']:
-                            node, out = self.rpool['atlas_name'][atlas_idx][
-                                'data']
-                            wf.connect(node, out, id_string, 'atlas_id')
-                        elif 'atlas-' in resource:
-                            for tag in resource.split('_'):
-                                if 'atlas-' in tag:
-                                    atlas_id = tag.replace('atlas-', '')
+                        if atlas_idx in self.rpool["atlas_name"]:
+                            node, out = self.rpool["atlas_name"][atlas_idx]["data"]
+                            wf.connect(node, out, id_string, "atlas_id")
+                        elif "atlas-" in resource:
+                            for tag in resource.split("_"):
+                                if "atlas-" in tag:
+                                    atlas_id = tag.replace("atlas-", "")
                             id_string.inputs.atlas_id = atlas_id
                         else:
-                            warnings.warn(str(
-                                LookupError("\n[!] No atlas ID found for "
-                                            f"{out_dct['filename']}.\n")))
-                nii_name = pe.Node(Rename(), name=f'nii_{resource_idx}_'
-                                                  f'{pipe_x}')
+                            warnings.warn(
+                                str(
+                                    LookupError(
+                                        "\n[!] No atlas ID found for "
+                                        f"{out_dct['filename']}.\n"
+                                    )
+                                )
+                            )
+                nii_name = pe.Node(Rename(), name=f"nii_{resource_idx}_" f"{pipe_x}")
                 nii_name.inputs.keep_ext = True
-
+
                 if resource in Outputs.ciftis:
-                    nii_name.inputs.keep_ext = False
-                    id_string.inputs.extension = Outputs.ciftis[resource]
+                    nii_name.inputs.keep_ext = False
+                    id_string.inputs.extension = Outputs.ciftis[resource]
                 else:
-                    nii_name.inputs.keep_ext = True
-
+                    nii_name.inputs.keep_ext = True
+
                 if resource in Outputs.giftis:
-                    nii_name.inputs.keep_ext = False
-                    id_string.inputs.extension = f'{Outputs.giftis[resource]}.gii'
+                    nii_name.inputs.keep_ext = False
+                    id_string.inputs.extension = f"{Outputs.giftis[resource]}.gii"
                 else:
-                    nii_name.inputs.keep_ext = True
-
-                wf.connect(id_string, 'out_filename',
-                           nii_name, 'format_string')
-
-                node, out = self.rpool[resource][pipe_idx]['data']
+                    nii_name.inputs.keep_ext = True
+
+                wf.connect(id_string, "out_filename", nii_name, "format_string")
+
+                node, out = self.rpool[resource][pipe_idx]["data"]
                 try:
-                    wf.connect(node, out, nii_name, 'in_file')
+                    wf.connect(node, out, nii_name, "in_file")
                 except OSError as os_error:
                     logger.warning(os_error)
                     continue
 
-                write_json_imports = ['import os', 'import json']
-                write_json = pe.Node(Function(input_names=['json_data',
-                                                           'filename'],
-                                              output_names=['json_file'],
-                                              function=write_output_json,
-                                              imports=write_json_imports),
-                                     name=f'json_{resource_idx}_{pipe_x}')
+                write_json_imports = ["import os", "import json"]
+                write_json = pe.Node(
+                    Function(
+                        input_names=["json_data", "filename"],
+                        output_names=["json_file"],
+                        function=write_output_json,
+                        imports=write_json_imports,
+                    ),
+                    name=f"json_{resource_idx}_{pipe_x}",
+                )
                 write_json.inputs.json_data = json_info
 
-                wf.connect(id_string, 'out_filename', write_json, 'filename')
-                ds = pe.Node(DataSink(), name=f'sinker_{resource_idx}_'
-                                              f'{pipe_x}')
+                wf.connect(id_string, "out_filename", write_json, "filename")
+                ds = pe.Node(DataSink(), name=f"sinker_{resource_idx}_" f"{pipe_x}")
                 ds.inputs.parameterization = False
-                ds.inputs.base_directory = out_dct['out_dir']
-                ds.inputs.encrypt_bucket_keys = cfg.pipeline_setup[
-                    'Amazon-AWS']['s3_encryption']
-                ds.inputs.container = out_dct['container']
-
-                if cfg.pipeline_setup['Amazon-AWS'][
-                        'aws_output_bucket_credentials']:
-                    ds.inputs.creds_path = cfg.pipeline_setup['Amazon-AWS'][
-                        'aws_output_bucket_credentials']
-                expected_outputs += (out_dct['subdir'], create_id_string(
-                    self.cfg, unique_id, resource_idx,
-                    template_desc=id_string.inputs.template_desc,
-                    atlas_id=atlas_id, subdir=out_dct['subdir']))
-                wf.connect(nii_name, 'out_file',
-                           ds, f'{out_dct["subdir"]}.@data')
-                wf.connect(write_json, 'json_file',
-                           ds, f'{out_dct["subdir"]}.@json')
+                ds.inputs.base_directory = out_dct["out_dir"]
+                ds.inputs.encrypt_bucket_keys = cfg.pipeline_setup["Amazon-AWS"][
+                    "s3_encryption"
+                ]
+                ds.inputs.container = out_dct["container"]
+
+                if cfg.pipeline_setup["Amazon-AWS"]["aws_output_bucket_credentials"]:
+                    ds.inputs.creds_path = cfg.pipeline_setup["Amazon-AWS"][
+                        "aws_output_bucket_credentials"
+                    ]
+                expected_outputs += (
+                    out_dct["subdir"],
+                    create_id_string(
+                        self.cfg,
+                        unique_id,
+                        resource_idx,
+                        template_desc=id_string.inputs.template_desc,
+                        atlas_id=atlas_id,
+                        subdir=out_dct["subdir"],
+                    ),
+                )
                wf.connect(nii_name, "out_file", ds, f'{out_dct["subdir"]}.@data')
                wf.connect(write_json, "json_file", ds, f'{out_dct["subdir"]}.@json')
         outputs_logger.info(expected_outputs)
 
     def node_data(self, resource, **kwargs):
-        '''Factory function to create NodeData objects
+        """Factory function to create NodeData objects
 
         Parameters
         ----------
@@ -1264,7 +1415,7 @@ def node_data(self, resource, **kwargs):
         Returns
         -------
         NodeData
-        '''
+        """
         return NodeData(self, resource, **kwargs)
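node_data is a thin accessor over the pool. A hedged usage sketch follows, with a hypothetical resource name; NodeData is assumed to simply bundle the (node, out) pair for a resource so it can be wired into a workflow.

# hypothetical usage inside a node block function
nd = strat_pool.node_data("desc-preproc_T1w")
# the bundled node/output pair can then be connected, e.g.:
# wf.connect(nd.node, nd.out, some_node, "in_file")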
 
@@ -1275,8 +1426,7 @@ def __init__(self, node_block_functions, debug=False):
 
         self.node_blocks = {}
 
-        for node_block_function in node_block_functions:  # <---- sets up the NodeBlock object in case you gave it a list of node blocks instead of a single one - for option forking.
-
+        for node_block_function in node_block_functions:  # <---- sets up the NodeBlock object in case you gave it a list of node blocks instead of a single one - for option forking.
             self.input_interface = []
             if isinstance(node_block_function, tuple):
                 self.input_interface = node_block_function[1]
@@ -1286,9 +1436,11 @@ def __init__(self, node_block_functions, debug=False):
 
             if not isinstance(node_block_function, NodeBlockFunction):
                 # If the object is a plain function `__name__` will be more useful than `str()`
-                obj_str = node_block_function.__name__ \
-                    if hasattr(node_block_function, '__name__') else \
-                    str(node_block_function)
+                obj_str = (
+                    node_block_function.__name__
+                    if hasattr(node_block_function, "__name__")
+                    else str(node_block_function)
+                )
                 raise TypeError(f'Object is not a nodeblock: "{obj_str}"')
 
             name = node_block_function.name
@@ -1305,37 +1457,37 @@ def __init__(self, node_block_functions, debug=False):
                             list_tup.append(interface[1])
                         node_block_function.inputs.remove(orig_input)
                         node_block_function.inputs.append(tuple(list_tup))
-                    else:
-                        if orig_input == interface[0]:
-                            node_block_function.inputs.remove(interface[0])
-                            node_block_function.inputs.append(interface[1])
+                    elif orig_input == interface[0]:
+                        node_block_function.inputs.remove(interface[0])
+                        node_block_function.inputs.append(interface[1])
 
             for key, val in node_block_function.legacy_nodeblock_dict().items():
                 self.node_blocks[name][key] = val
 
-            self.node_blocks[name]['block_function'] = node_block_function
+            self.node_blocks[name]["block_function"] = node_block_function
 
-            #TODO: fix/replace below
+            # TODO: fix/replace below
             self.outputs = {}
             for out in node_block_function.outputs:
                 self.outputs[out] = None
 
-            self.options = ['base']
+            self.options = ["base"]
             if node_block_function.outputs is not None:
                 self.options = node_block_function.outputs
 
-            logger.info('Connecting %s...', name)
+            logger.info("Connecting %s...", name)
             if debug:
-                config.update_config(
-                    {'logging': {'workflow_level': 'DEBUG'}})
+                config.update_config({"logging": {"workflow_level": "DEBUG"}})
                 logging.update_logging(config)
-                logger.debug('"inputs": %s\n\t "outputs": %s%s',
-                             node_block_function.inputs,
-                             list(self.outputs.keys()),
-                             f'\n\t"options": {self.options}'
-                             if self.options != ['base'] else '')
-                config.update_config(
-                    {'logging': {'workflow_level': 'INFO'}})
+                logger.debug(
+                    '"inputs": %s\n\t "outputs": %s%s',
+                    node_block_function.inputs,
+                    list(self.outputs.keys()),
+                    f'\n\t"options": {self.options}'
+                    if self.options != ["base"]
+                    else "",
+                )
+                config.update_config({"logging": {"workflow_level": "INFO"}})
                 logging.update_logging(config)
 
     def get_name(self):
@@ -1343,14 +1495,16 @@ def get_name(self):
 
     def check_null(self, val):
         if isinstance(val, str):
-            val = None if val.lower() == 'none' else val
+            val = None if val.lower() == "none" else val
         return val
 
     def check_output(self, outputs, label, name):
         if label not in outputs:
-            raise NameError(f'\n[!] Output name "{label}" in the block '
-                            'function does not match the outputs list '
-                            f'{outputs} in Node Block "{name}"\n')
+            raise NameError(
+                f'\n[!] Output name "{label}" in the block '
+                "function does not match the outputs list "
+                f'{outputs} in Node Block "{name}"\n'
+            )
 
     def grab_tiered_dct(self, cfg, key_list):
         cfg_dct = cfg.dict()
@@ -1358,17 +1512,19 @@ def grab_tiered_dct(self, cfg, key_list):
             try:
                 cfg_dct = cfg_dct.get(key, {})
             except KeyError:
-                raise Exception(f"[!] The config provided to the node block is not valid")
+                raise Exception(
+                    "[!] The config provided to the node block is not valid"
+                )
         return cfg_dct
 
     def connect_block(self, wf, cfg, rpool):
-        debug = cfg.pipeline_setup['Debugging']['verbose']
+        debug = cfg.pipeline_setup["Debugging"]["verbose"]
         all_opts = []
         for name, block_dct in self.node_blocks.items():
             opts = []
-            config = self.check_null(block_dct['config'])
-            option_key = self.check_null(block_dct['option_key'])
-            option_val = self.check_null(block_dct['option_val'])
+            config = self.check_null(block_dct["config"])
+            option_key = self.check_null(block_dct["option_key"])
+            option_val = self.check_null(block_dct["option_val"])
             if option_key and option_val:
                 if not isinstance(option_key, list):
                     option_key = [option_key]
@@ -1378,13 +1534,15 @@ def connect_block(self, wf, cfg, rpool):
                     key_list = config + option_key
                 else:
                     key_list = option_key
-                if 'USER-DEFINED' in option_val:
+                if "USER-DEFINED" in option_val:
                     # load custom config data into each 'opt'
                     opts = self.grab_tiered_dct(cfg, key_list)
                 else:
                     for option in option_val:
                         try:
-                            if option in self.grab_tiered_dct(cfg, key_list):  # <---- goes over the option_vals in the node block docstring, and checks if the user's pipeline config included it in the forking list
+                            if (
+                                option in self.grab_tiered_dct(cfg, key_list)
+                            ):  # <---- goes over the option_vals in the node block docstring, and checks if the user's pipeline config included it in the forking list
                                 opts.append(option)
                         except AttributeError as err:
                             raise Exception(f"{err}\nNode Block: {name}")
@@ -1395,12 +1553,14 @@ def connect_block(self, wf, cfg, rpool):
             elif option_key and not option_val:
                 # enables multiple config forking entries
                 if not isinstance(option_key[0], list):
-                    raise Exception(f'[!] The option_key field ({option_key}) '
-                                    f'for {name} exists but there is no '
-                                    'option_val.\n\nIf you are trying to '
-                                    'populate multiple option keys, the '
-                                    'option_val field must contain a list of '
-                                    'a list.\n')
+                    raise Exception(
+                        f"[!] The option_key field ({option_key}) "
+                        f"for {name} exists but there is no "
+                        "option_val.\n\nIf you are trying to "
+                        "populate multiple option keys, the "
+                        "option_val field must contain a list of "
+                        "a list.\n"
+                    )
                 for option_config in option_key:
                     # option_config is a list of pipe config levels down to the option
                     if config:
@@ -1409,29 +1569,35 @@ def connect_block(self, wf, cfg, rpool):
                         key_list = option_config
                     option_val = option_config[-1]
                     if option_val in self.grab_tiered_dct(cfg, key_list[:-1]):
-                        opts.append(option_val)
-            else:  # AND, if there are multiple option-val's (in a list) in the docstring, it gets iterated below in 'for opt in option' etc. AND THAT'S WHEN YOU HAVE TO DELINEATE WITHIN THE NODE BLOCK CODE!!!
+                        opts.append(option_val)
+            else:  # AND, if there are multiple option-val's (in a list) in the docstring, it gets iterated below in 'for opt in option' etc. AND THAT'S WHEN YOU HAVE TO DELINEATE WITHIN THE NODE BLOCK CODE!!!
                 opts = [None]
             all_opts += opts
 
         sidecar_additions = {
-            'CpacConfigHash': hashlib.sha1(json.dumps(cfg.dict(), sort_keys=True).encode('utf-8')).hexdigest(),
-            'CpacConfig': cfg.dict()
+            "CpacConfigHash": hashlib.sha1(
+                json.dumps(cfg.dict(), sort_keys=True).encode("utf-8")
+            ).hexdigest(),
+            "CpacConfig": cfg.dict(),
         }
 
-        if cfg['pipeline_setup']['output_directory'].get('user_defined'):
-            sidecar_additions['UserDefined'] = cfg['pipeline_setup']['output_directory']['user_defined']
-
-        for name, block_dct in self.node_blocks.items():  # <--- iterates over either the single node block in the sequence, or a list of node blocks within the list of node blocks, i.e. for option forking.
-
-            switch = self.check_null(block_dct['switch'])
-            config = self.check_null(block_dct['config'])
-            option_key = self.check_null(block_dct['option_key'])
-            option_val = self.check_null(block_dct['option_val'])
-            inputs = self.check_null(block_dct['inputs'])
-            outputs = self.check_null(block_dct['outputs'])
-
-            block_function = block_dct['block_function']
+        if cfg["pipeline_setup"]["output_directory"].get("user_defined"):
+            sidecar_additions["UserDefined"] = cfg["pipeline_setup"][
+                "output_directory"
+            ]["user_defined"]
+
+        for (
+            name,
+            block_dct,
+        ) in self.node_blocks.items():  # <--- iterates over either the single node block in the sequence, or a list of node blocks within the list of node blocks, i.e. for option forking.
            switch = self.check_null(block_dct["switch"])
            config = self.check_null(block_dct["config"])
            option_key = self.check_null(block_dct["option_key"])
            option_val = self.check_null(block_dct["option_val"])
            inputs = self.check_null(block_dct["inputs"])
            outputs = self.check_null(block_dct["outputs"])

            block_function = block_dct["block_function"]
 
             opts = []
             if option_key and option_val:
@@ -1443,15 +1609,19 @@ def connect_block(self, wf, cfg, rpool):
                     key_list = config + option_key
                 else:
                     key_list = option_key
-                if 'USER-DEFINED' in option_val:
+                if "USER-DEFINED" in option_val:
                     # load custom config data into each 'opt'
                     opts = self.grab_tiered_dct(cfg, key_list)
                 else:
                     for option in option_val:
-                        if option in self.grab_tiered_dct(cfg, key_list):  # <---- goes over the option_vals in the node block docstring, and checks if the user's pipeline config included it in the forking list
+                        if (
+                            option in self.grab_tiered_dct(cfg, key_list)
+                        ):  # <---- goes over the option_vals in the node block docstring, and checks if the user's pipeline config included it in the forking list
                             opts.append(option)
-            else:  # AND, if there are multiple option-val's (in a list) in the docstring, it gets iterated below in 'for opt in option' etc. AND THAT'S WHEN YOU HAVE TO DELINEATE WITHIN THE NODE BLOCK CODE!!!
-                opts = [None]  # THIS ALSO MEANS the multiple option-val's in docstring node blocks can be entered once in the entire node-block sequence, not in a list of multiples
+            else:  # AND, if there are multiple option-val's (in a list) in the docstring, it gets iterated below in 'for opt in option' etc. AND THAT'S WHEN YOU HAVE TO DELINEATE WITHIN THE NODE BLOCK CODE!!!
+                opts = [
+                    None
+                ]  # THIS ALSO MEANS the multiple option-val's in docstring node blocks can be entered once in the entire node-block sequence, not in a list of multiples
             if not opts:
                 # for node blocks where the options are split into different
                 # block functions - opts will be empty for non-selected
@@ -1465,41 +1635,45 @@ def connect_block(self, wf, cfg, rpool):
                 try:
                     key_list = config + switch
                 except TypeError:
-                    raise Exception("\n\n[!] Developer info: Docstring error "
-                                    f"for {name}, make sure the 'config' or "
-                                    "'switch' fields are lists.\n\n")
+                    raise Exception(
+                        "\n\n[!] Developer info: Docstring error "
+                        f"for {name}, make sure the 'config' or "
+                        "'switch' fields are lists.\n\n"
+                    )
                 switch = self.grab_tiered_dct(cfg, key_list)
-
-            else:
-                if isinstance(switch[0], list):
-                    # we have multiple switches, which is designed to only work if
-                    # config is set to "None"
-                    switch_list = []
-                    for key_list in switch:
-                        val = self.grab_tiered_dct(cfg, key_list)
-                        if isinstance(val, list):
-                            # fork switches
-                            if True in val:
-                                switch_list.append(True)
-                            if False in val:
-                                switch_list.append(False)
-                        else:
-                            switch_list.append(val)
-                    if False in switch_list:
-                        switch = [False]
+
+            elif isinstance(switch[0], list):
+                # we have multiple switches, which is designed to only work if
+                # config is set to "None"
+                switch_list = []
+                for key_list in switch:
+                    val = self.grab_tiered_dct(cfg, key_list)
+                    if isinstance(val, list):
+                        # fork switches
+                        if True in val:
+                            switch_list.append(True)
+                        if False in val:
+                            switch_list.append(False)
                     else:
-                        switch = [True]
+                        switch_list.append(val)
+                if False in switch_list:
+                    switch = [False]
                 else:
-                    # if config is set to "None"
-                    key_list = switch
-                    switch = self.grab_tiered_dct(cfg, key_list)
+                    switch = [True]
+            else:
+                # if config is set to "None"
+                key_list = switch
+                switch = self.grab_tiered_dct(cfg, key_list)
             if not isinstance(switch, list):
                 switch = [switch]
             if True in switch:
                 for pipe_idx, strat_pool in rpool.get_strats(
-                        inputs, debug).items():  # strat_pool is a ResourcePool like {'desc-preproc_T1w': { 'json': info, 'data': (node, out) }, 'desc-brain_mask': etc.}
-                    fork = False in switch  # keep in mind rpool.get_strats(inputs) = {pipe_idx1: {'desc-preproc_T1w': etc.}, pipe_idx2: {..} }
-                    for opt in opts:  # it's a dictionary of ResourcePools called strat_pools, except those sub-ResourcePools only have one level! no pipe_idx strat keys.
+                    inputs, debug
+                ).items():  # strat_pool is a ResourcePool like {'desc-preproc_T1w': { 'json': info, 'data': (node, out) }, 'desc-brain_mask': etc.}
+                    fork = (
+                        False in switch
+                    )  # keep in mind rpool.get_strats(inputs) = {pipe_idx1: {'desc-preproc_T1w': etc.}, pipe_idx2: {..} }
+                    for opt in opts:  # it's a dictionary of ResourcePools called strat_pools, except those sub-ResourcePools only have one level! no pipe_idx strat keys.
                         # remember, you can get 'data' or 'json' from strat_pool with member functions
                         # strat_pool has all of the JSON information of all the inputs!
                         # so when we set_data below for the TOP-LEVEL MAIN RPOOL (not the strat_pool), we can generate new merged JSON information for each output.
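The switch handling above collapses several pipeline-config shapes into one simple rule, mirrored directly in the code (`True in switch` / `fork = False in switch`). A minimal sketch of that decision with hypothetical values:

switch = [True, False]       # a fork switch pulled from the pipeline config
run_block = True in switch   # the block runs if any entry is on
fork = False in switch       # and forks (keeps the "off" path too) if any entry is off
assert run_block and fork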
@@ -1518,130 +1692,155 @@ def connect_block(self, wf, cfg, rpool):
                                 strat_pool.copy_resource(input_name, interface[0])
                                 replaced_inputs.append(interface[0])
                         try:
-                            wf, outs = block_function(wf, cfg, strat_pool,
-                                                      pipe_x, opt)
+                            wf, outs = block_function(wf, cfg, strat_pool, pipe_x, opt)
                         except IOError as e:  # duplicate node
                             logger.warning(e)
                             continue
 
                         if not outs:
-                            if (block_function.__name__ == 'freesurfer_'
-                                                           'postproc'):
-                                logger.warning(
-                                    WARNING_FREESURFER_OFF_WITH_DATA)
-                                LOGTAIL['warnings'].append(
-                                    WARNING_FREESURFER_OFF_WITH_DATA)
+                            if block_function.__name__ == "freesurfer_" "postproc":
+                                logger.warning(WARNING_FREESURFER_OFF_WITH_DATA)
+                                LOGTAIL["warnings"].append(
+                                    WARNING_FREESURFER_OFF_WITH_DATA
+                                )
                             continue
 
                         if opt and len(option_val) > 1:
-                            node_name = f'{node_name}_{opt}'
-                        elif opt and 'USER-DEFINED' in option_val:
+                            node_name = f"{node_name}_{opt}"
+                        elif opt and "USER-DEFINED" in option_val:
                             node_name = f'{node_name}_{opt["Name"]}'
 
                         if debug:
-                            verbose_logger = getLogger('engine')
-                            verbose_logger.debug('\n=======================')
-                            verbose_logger.debug('Node name: %s', node_name)
-                            prov_dct = \
-                                rpool.get_resource_strats_from_prov(
-                                    ast.literal_eval(pipe_idx))
+                            verbose_logger = getLogger("engine")
+                            verbose_logger.debug("\n=======================")
+                            verbose_logger.debug("Node name: %s", node_name)
+                            prov_dct = rpool.get_resource_strats_from_prov(
+                                ast.literal_eval(pipe_idx)
+                            )
                             for key, val in prov_dct.items():
-                                verbose_logger.debug('-------------------')
-                                verbose_logger.debug('Input - %s:', key)
-                                sub_prov_dct = \
-                                    rpool.get_resource_strats_from_prov(val)
+                                verbose_logger.debug("-------------------")
+                                verbose_logger.debug("Input - %s:", key)
+                                sub_prov_dct = rpool.get_resource_strats_from_prov(val)
                                 for sub_key, sub_val in sub_prov_dct.items():
-                                    sub_sub_dct = \
-                                        rpool.get_resource_strats_from_prov(
-                                            sub_val)
-                                    verbose_logger.debug('  sub-input - %s:',
-                                                         sub_key)
-                                    verbose_logger.debug('    prov = %s',
-                                                         sub_val)
+                                    sub_sub_dct = rpool.get_resource_strats_from_prov(
+                                        sub_val
+                                    )
+                                    verbose_logger.debug("  sub-input - %s:", sub_key)
+                                    verbose_logger.debug("    prov = %s", sub_val)
                                     verbose_logger.debug(
-                                        '      sub_sub_inputs = %s',
-                                        sub_sub_dct.keys())
+                                        "      sub_sub_inputs = %s", sub_sub_dct.keys()
+                                    )
 
                         for label, connection in outs.items():
                             self.check_output(outputs, label, name)
-                            new_json_info = copy.deepcopy(strat_pool.get('json'))
+                            new_json_info = copy.deepcopy(strat_pool.get("json"))
 
                             # transfer over data-specific json info
                             # for example, if the input data json is _bold and the output is also _bold
-                            data_type = label.split('_')[-1]
-                            if data_type in new_json_info['subjson']:
-                                if 'SkullStripped' in new_json_info['subjson'][data_type]:
-                                    new_json_info['SkullStripped'] = new_json_info['subjson'][data_type]['SkullStripped']
-
-                            # determine sources for the outputs, i.e. all input data into the node block
-                            new_json_info['Sources'] = [x for x in strat_pool.get_entire_rpool() if x != 'json' and x not in replaced_inputs]
-
+                            data_type = label.split("_")[-1]
+                            if data_type in new_json_info["subjson"]:
+                                if (
+                                    "SkullStripped"
+                                    in new_json_info["subjson"][data_type]
+                                ):
+                                    new_json_info["SkullStripped"] = new_json_info[
+                                        "subjson"
+                                    ][data_type]["SkullStripped"]
+
+                            # determine sources for the outputs, i.e. all input data into the node block
+                            new_json_info["Sources"] = [
+                                x
+                                for x in strat_pool.get_entire_rpool()
+                                if x != "json" and x not in replaced_inputs
+                            ]
+
                             if isinstance(outputs, dict):
                                 new_json_info.update(outputs[label])
-                                if 'Description' not in outputs[label]:
+                                if "Description" not in outputs[label]:
                                     # don't propagate old Description
                                     try:
-                                        del new_json_info['Description']
+                                        del new_json_info["Description"]
                                     except KeyError:
                                         pass
-                                if 'Template' in outputs[label]:
-                                    template_key = outputs[label]['Template']
-                                    if template_key in new_json_info['Sources']:
+                                if "Template" in outputs[label]:
+                                    template_key = outputs[label]["Template"]
+                                    if template_key in new_json_info["Sources"]:
                                         # only if the pipeline config template key is entered as the 'Template' field
                                         # otherwise, skip this and take in the literal 'Template' string
                                         try:
-                                            new_json_info['Template'] = new_json_info['subjson'][template_key]['Description']
+                                            new_json_info["Template"] = new_json_info[
+                                                "subjson"
+                                            ][template_key]["Description"]
                                         except KeyError:
                                             pass
                                     try:
-                                        new_json_info['Resolution'] = new_json_info['subjson'][template_key]['Resolution']
+                                        new_json_info["Resolution"] = new_json_info[
+                                            "subjson"
+                                        ][template_key]["Resolution"]
                                     except KeyError:
                                         pass
                             else:
                                 # don't propagate old Description
                                 try:
-                                    del new_json_info['Description']
+                                    del new_json_info["Description"]
                                 except KeyError:
                                     pass
 
-                            if 'Description' in new_json_info:
-                                new_json_info['Description'] = ' '.join(new_json_info['Description'].split())
+                            if "Description" in new_json_info:
+                                new_json_info["Description"] = " ".join(
+                                    new_json_info["Description"].split()
+                                )
 
                             for sidecar_key, sidecar_value in sidecar_additions.items():
                                 if sidecar_key not in new_json_info:
                                     new_json_info[sidecar_key] = sidecar_value
 
                             try:
-                                del new_json_info['subjson']
+                                del new_json_info["subjson"]
                             except KeyError:
                                 pass
 
                             if fork or len(opts) > 1 or len(all_opts) > 1:
-                                if 'CpacVariant' not in new_json_info:
-                                    new_json_info['CpacVariant'] = {}
+                                if "CpacVariant" not in new_json_info:
+                                    new_json_info["CpacVariant"] = {}
                                 raw_label = rpool.get_raw_label(label)
-                                if raw_label not in new_json_info['CpacVariant']:
-                                    new_json_info['CpacVariant'][raw_label] = []
-                                new_json_info['CpacVariant'][raw_label].append(node_name)
-
-                            rpool.set_data(label,
-                                           connection[0],
-                                           connection[1],
-                                           new_json_info,
-                                           pipe_idx, node_name, fork)
+                                if raw_label not in new_json_info["CpacVariant"]:
+                                    new_json_info["CpacVariant"][raw_label] = []
+                                new_json_info["CpacVariant"][raw_label].append(
+                                    node_name
+                                )
+
+                            rpool.set_data(
+                                label,
+                                connection[0],
+                                connection[1],
+                                new_json_info,
+                                pipe_idx,
+                                node_name,
+                                fork,
+                            )
 
                             wf, post_labels = rpool.post_process(
-                                wf, label, connection, new_json_info, pipe_idx,
-                                pipe_x, outs)
+                                wf,
+                                label,
+                                connection,
+                                new_json_info,
+                                pipe_idx,
+                                pipe_x,
+                                outs,
+                            )
 
                             if rpool.func_reg:
                                 for postlabel in post_labels:
                                     connection = (postlabel[1], postlabel[2])
-                                    wf = rpool.derivative_xfm(wf, postlabel[0],
-                                                              connection,
-                                                              new_json_info,
-                                                              pipe_idx,
-                                                              pipe_x)
+                                    wf = rpool.derivative_xfm(
+                                        wf,
+                                        postlabel[0],
+                                        connection,
+                                        new_json_info,
+                                        pipe_idx,
+                                        pipe_x,
+                                    )
         return wf
 
@@ -1689,211 +1888,224 @@ def wrap_block(node_blocks, interface, wf, cfg, strat_pool, pipe_num, opt):
     """
 
     for block in node_blocks:
-        #new_pool = copy.deepcopy(strat_pool)
+        # new_pool = copy.deepcopy(strat_pool)
         for in_resource, val in interface.items():
             if isinstance(val, tuple):
-                strat_pool.set_data(in_resource, val[0], val[1], {}, "", "",
-                                    fork=True)#
-        if 'sub_num' not in strat_pool.get_pool_info():
-            strat_pool.set_pool_info({'sub_num': 0})
-        sub_num = strat_pool.get_pool_info()['sub_num']
-
-        wf, outputs = block(wf, cfg, strat_pool, f'{pipe_num}-{sub_num}', opt)#
+                strat_pool.set_data(
+                    in_resource, val[0], val[1], {}, "", "", fork=True
+                )  #
+        if "sub_num" not in strat_pool.get_pool_info():
+            strat_pool.set_pool_info({"sub_num": 0})
+        sub_num = strat_pool.get_pool_info()["sub_num"]
+
+        wf, outputs = block(wf, cfg, strat_pool, f"{pipe_num}-{sub_num}", opt)  #
         for out, val in outputs.items():
             if out in interface and isinstance(interface[out], str):
-                strat_pool.set_data(interface[out], outputs[out][0], outputs[out][1],
-                                    {}, "", "")
+                strat_pool.set_data(
+                    interface[out], outputs[out][0], outputs[out][1], {}, "", ""
+                )
             else:
-                strat_pool.set_data(out, outputs[out][0], outputs[out][1],
-                                    {}, "", "")
+                strat_pool.set_data(out, outputs[out][0], outputs[out][1], {}, "", "")
         sub_num += 1
-        strat_pool.set_pool_info({'sub_num': sub_num})
+        strat_pool.set_pool_info({"sub_num": sub_num})
 
     return (wf, strat_pool)
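wrap_block lets one node block run a sequence of others against a mapped interface. A hedged usage sketch with hypothetical block names and resource keys, following the signature above:

# hypothetical: run two sub-blocks inside a parent block, feeding a
# (node, output) pair in as 'desc-brain_T1w', then recover the results
interface = {"desc-brain_T1w": (node, "outputspec.brain")}
wf, strat_pool = wrap_block(
    [mask_t1w, normalize_t1w],   # illustrative sub-block functions
    interface, wf, cfg, strat_pool, pipe_num, opt,
)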
 
-def ingress_raw_anat_data(wf, rpool, cfg, data_paths, unique_id, part_id,
-                          ses_id):
-    if 'anat' not in data_paths:
-        print('No anatomical data present.')
+def ingress_raw_anat_data(wf, rpool, cfg, data_paths, unique_id, part_id, ses_id):
+    if "anat" not in data_paths:
+        print("No anatomical data present.")
         return rpool
 
-    if 'creds_path' not in data_paths:
-        data_paths['creds_path'] = None
+    if "creds_path" not in data_paths:
+        data_paths["creds_path"] = None
 
-    anat_flow = create_anat_datasource(f'anat_T1w_gather_{part_id}_{ses_id}')
+    anat_flow = create_anat_datasource(f"anat_T1w_gather_{part_id}_{ses_id}")
 
     anat = {}
-    if type(data_paths['anat']) is str:
-        anat['T1']=data_paths['anat']
-    elif 'T1w' in data_paths['anat']:
-        anat['T1']=data_paths['anat']['T1w']
+    if type(data_paths["anat"]) is str:
+        anat["T1"] = data_paths["anat"]
+    elif "T1w" in data_paths["anat"]:
+        anat["T1"] = data_paths["anat"]["T1w"]
 
-    if 'T1' in anat:
+    if "T1" in anat:
         anat_flow.inputs.inputnode.set(
             subject=part_id,
-            anat=anat['T1'],
-            creds_path=data_paths['creds_path'],
-            dl_dir=cfg.pipeline_setup['working_directory']['path'],
-            img_type='anat'
+            anat=anat["T1"],
+            creds_path=data_paths["creds_path"],
+            dl_dir=cfg.pipeline_setup["working_directory"]["path"],
+            img_type="anat",
         )
-        rpool.set_data('T1w', anat_flow, 'outputspec.anat', {},
-                       "", "anat_ingress")
-
-    if 'T2w' in data_paths['anat']:
-        anat_flow_T2 = create_anat_datasource(f'anat_T2w_gather_{part_id}_{ses_id}')
+        rpool.set_data("T1w", anat_flow, "outputspec.anat", {}, "", "anat_ingress")
+
+    if "T2w" in data_paths["anat"]:
+        anat_flow_T2 = create_anat_datasource(f"anat_T2w_gather_{part_id}_{ses_id}")
         anat_flow_T2.inputs.inputnode.set(
             subject=part_id,
-            anat=data_paths['anat']['T2w'],
-            creds_path=data_paths['creds_path'],
-            dl_dir=cfg.pipeline_setup['working_directory']['path'],
-            img_type='anat'
+            anat=data_paths["anat"]["T2w"],
+            creds_path=data_paths["creds_path"],
+            dl_dir=cfg.pipeline_setup["working_directory"]["path"],
+            img_type="anat",
+        )
+        rpool.set_data("T2w", anat_flow_T2, "outputspec.anat", {}, "", "anat_ingress")
+
+    if cfg.surface_analysis["freesurfer"]["ingress_reconall"]:
+        rpool = ingress_freesurfer(
+            wf, rpool, cfg, data_paths, unique_id, part_id, ses_id
         )
-        rpool.set_data('T2w', anat_flow_T2, 'outputspec.anat', {},
-                       "", "anat_ingress")
-
-    if cfg.surface_analysis['freesurfer']['ingress_reconall']:
-        rpool = ingress_freesurfer(wf, rpool, cfg, data_paths, unique_id, part_id,
-                                   ses_id)
-
     return rpool
 
-def ingress_freesurfer(wf, rpool, cfg, data_paths, unique_id, part_id,
-                       ses_id):
-
-    try:
-        fs_path = os.path.join(cfg.pipeline_setup['freesurfer_dir'], part_id)
+
+def ingress_freesurfer(wf, rpool, cfg, data_paths, unique_id, part_id, ses_id):
+    try:
+        fs_path = os.path.join(cfg.pipeline_setup["freesurfer_dir"], part_id)
     except KeyError:
-        print('No FreeSurfer data present.')
+        print("No FreeSurfer data present.")
         return rpool
-
-    #fs_path = os.path.join(cfg.pipeline_setup['freesurfer_dir'], part_id)
+
+    # fs_path = os.path.join(cfg.pipeline_setup['freesurfer_dir'], part_id)
     if not os.path.exists(fs_path):
-        if 'sub' in part_id:
-            fs_path = os.path.join(cfg.pipeline_setup['freesurfer_dir'], part_id.replace('sub-', ''))
+        if "sub" in part_id:
+            fs_path = os.path.join(
+                cfg.pipeline_setup["freesurfer_dir"], part_id.replace("sub-", "")
+            )
         else:
-            fs_path = os.path.join(cfg.pipeline_setup['freesurfer_dir'], ('sub-' + part_id))
-
+            fs_path = os.path.join(
+                cfg.pipeline_setup["freesurfer_dir"], ("sub-" + part_id)
+            )
+
         # patch for flo-specific data
         if not os.path.exists(fs_path):
-            subj_ses = part_id + '-' + ses_id
-            fs_path = os.path.join(cfg.pipeline_setup['freesurfer_dir'], subj_ses)
+            subj_ses = part_id + "-" + ses_id
+            fs_path = os.path.join(cfg.pipeline_setup["freesurfer_dir"], subj_ses)
             if not os.path.exists(fs_path):
-                print(f'No FreeSurfer data found for subject {part_id}')
+                print(f"No FreeSurfer data found for subject {part_id}")
                 return rpool
-
+
     # Check for double nested subj names
-    if os.path.exists(os.path.join(fs_path, os.path.basename(fs_path))):
+    if os.path.exists(os.path.join(fs_path, os.path.basename(fs_path))):
         fs_path = os.path.join(fs_path, part_id)
 
-    fs_ingress = create_general_datasource('gather_freesurfer_dir')
+    fs_ingress = create_general_datasource("gather_freesurfer_dir")
     fs_ingress.inputs.inputnode.set(
         unique_id=unique_id,
         data=fs_path,
-        creds_path=data_paths['creds_path'],
-        dl_dir=cfg.pipeline_setup['working_directory']['path'])
-    rpool.set_data("freesurfer-subject-dir", fs_ingress, 'outputspec.data',
-                   {}, "", "freesurfer_config_ingress")
+        creds_path=data_paths["creds_path"],
+        dl_dir=cfg.pipeline_setup["working_directory"]["path"],
+    )
+    rpool.set_data(
+        "freesurfer-subject-dir",
+        fs_ingress,
+        "outputspec.data",
+        {},
+        "",
+        "freesurfer_config_ingress",
+    )
 
     recon_outs = {
-        'pipeline-fs_raw-average': 'mri/rawavg.mgz',
-        'pipeline-fs_subcortical-seg': 'mri/aseg.mgz',
-        'pipeline-fs_brainmask': 'mri/brainmask.mgz',
-        'pipeline-fs_wmparc': 'mri/wmparc.mgz',
-        'pipeline-fs_T1': 'mri/T1.mgz',
-        'pipeline-fs_hemi-L_desc-surface_curv': 'surf/lh.curv',
-        'pipeline-fs_hemi-R_desc-surface_curv': 'surf/rh.curv',
-        'pipeline-fs_hemi-L_desc-surfaceMesh_pial': 'surf/lh.pial',
-        'pipeline-fs_hemi-R_desc-surfaceMesh_pial': 'surf/rh.pial',
-        'pipeline-fs_hemi-L_desc-surfaceMesh_smoothwm': 'surf/lh.smoothwm',
-        'pipeline-fs_hemi-R_desc-surfaceMesh_smoothwm': 'surf/rh.smoothwm',
-        'pipeline-fs_hemi-L_desc-surfaceMesh_sphere': 'surf/lh.sphere',
-        'pipeline-fs_hemi-R_desc-surfaceMesh_sphere': 'surf/rh.sphere',
-        'pipeline-fs_hemi-L_desc-surfaceMap_sulc': 'surf/lh.sulc',
-        'pipeline-fs_hemi-R_desc-surfaceMap_sulc': 'surf/rh.sulc',
-        'pipeline-fs_hemi-L_desc-surfaceMap_thickness': 'surf/lh.thickness',
-        'pipeline-fs_hemi-R_desc-surfaceMap_thickness': 'surf/rh.thickness',
-        'pipeline-fs_hemi-L_desc-surfaceMap_volume': 'surf/lh.volume',
-        'pipeline-fs_hemi-R_desc-surfaceMap_volume': 'surf/rh.volume',
-        'pipeline-fs_hemi-L_desc-surfaceMesh_white': 'surf/lh.white',
-        'pipeline-fs_hemi-R_desc-surfaceMesh_white': 'surf/rh.white',
-        'pipeline-fs_xfm': 'mri/transforms/talairach.lta'
+        "pipeline-fs_raw-average": "mri/rawavg.mgz",
+        "pipeline-fs_subcortical-seg": "mri/aseg.mgz",
+        "pipeline-fs_brainmask": "mri/brainmask.mgz",
+        "pipeline-fs_wmparc": "mri/wmparc.mgz",
+        "pipeline-fs_T1": "mri/T1.mgz",
+        "pipeline-fs_hemi-L_desc-surface_curv": "surf/lh.curv",
+        "pipeline-fs_hemi-R_desc-surface_curv": "surf/rh.curv",
+        "pipeline-fs_hemi-L_desc-surfaceMesh_pial": "surf/lh.pial",
+        "pipeline-fs_hemi-R_desc-surfaceMesh_pial": "surf/rh.pial",
+        "pipeline-fs_hemi-L_desc-surfaceMesh_smoothwm": "surf/lh.smoothwm",
+        "pipeline-fs_hemi-R_desc-surfaceMesh_smoothwm": "surf/rh.smoothwm",
+        "pipeline-fs_hemi-L_desc-surfaceMesh_sphere": "surf/lh.sphere",
+        "pipeline-fs_hemi-R_desc-surfaceMesh_sphere": "surf/rh.sphere",
+        "pipeline-fs_hemi-L_desc-surfaceMap_sulc": "surf/lh.sulc",
+        "pipeline-fs_hemi-R_desc-surfaceMap_sulc": "surf/rh.sulc",
+        "pipeline-fs_hemi-L_desc-surfaceMap_thickness": "surf/lh.thickness",
+        "pipeline-fs_hemi-R_desc-surfaceMap_thickness": "surf/rh.thickness",
+        "pipeline-fs_hemi-L_desc-surfaceMap_volume": "surf/lh.volume",
+        "pipeline-fs_hemi-R_desc-surfaceMap_volume": "surf/rh.volume",
+        "pipeline-fs_hemi-L_desc-surfaceMesh_white": "surf/lh.white",
+        "pipeline-fs_hemi-R_desc-surfaceMesh_white": "surf/rh.white",
+        "pipeline-fs_xfm": "mri/transforms/talairach.lta",
     }
-
+
     for key, outfile in recon_outs.items():
         fullpath = os.path.join(fs_path, outfile)
         if os.path.exists(fullpath):
-            fs_ingress = create_general_datasource(f'gather_fs_{key}_dir')
+            fs_ingress = create_general_datasource(f"gather_fs_{key}_dir")
             fs_ingress.inputs.inputnode.set(
                 unique_id=unique_id,
                 data=fullpath,
-                creds_path=data_paths['creds_path'],
-                dl_dir=cfg.pipeline_setup['working_directory']['path'])
-            rpool.set_data(key, fs_ingress, 'outputspec.data',
-                           {}, "", f"fs_{key}_ingress")
+                creds_path=data_paths["creds_path"],
+                dl_dir=cfg.pipeline_setup["working_directory"]["path"],
+            )
+            rpool.set_data(
+                key, fs_ingress, "outputspec.data", {}, "", f"fs_{key}_ingress"
+            )
         else:
-            warnings.warn(str(
-                LookupError("\n[!] Path does not exist for "
-                            f"{fullpath}.\n")))
-
+            warnings.warn(
+                str(LookupError("\n[!] Path does not exist for " f"{fullpath}.\n"))
+            )
+
     return rpool
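ingress_freesurfer tries several directory layouts before giving up; the fallback order is easier to read as a pure function. A sketch under the same candidate order the code above tries (simplified: the real code conditions the second and third candidates on whether part_id already carries a "sub-" prefix, and also unwraps double-nested subject directories); the function name is illustrative.

import os
from typing import Optional

def resolve_fs_path(fs_dir: str, part_id: str, ses_id: str) -> Optional[str]:
    candidates = [
        os.path.join(fs_dir, part_id),                      # e.g. sub-001
        os.path.join(fs_dir, part_id.replace("sub-", "")),  # e.g. 001
        os.path.join(fs_dir, "sub-" + part_id),             # prefix was missing
        os.path.join(fs_dir, f"{part_id}-{ses_id}"),        # site-specific layout
    ]
    return next((c for c in candidates if os.path.exists(c)), None)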
filename: if subdir == anat: - outdir_anat.append(os.path.join(subdir, - filename)) + outdir_anat.append(os.path.join(subdir, filename)) else: - outdir_func.append(os.path.join(subdir, - filename)) + outdir_func.append(os.path.join(subdir, filename)) - # Add derivatives directory to rpool - ingress = create_general_datasource(f'gather_derivatives_dir') + # Add derivatives directory to rpool + ingress = create_general_datasource("gather_derivatives_dir") ingress.inputs.inputnode.set( - unique_id=unique_id, - data=dir_path, - creds_path=creds_path, - dl_dir=cfg.pipeline_setup['working_directory']['path'] - ) - rpool.set_data("derivatives-dir", ingress, 'outputspec.data', - {}, "", "outdir_config_ingress") + unique_id=unique_id, + data=dir_path, + creds_path=creds_path, + dl_dir=cfg.pipeline_setup["working_directory"]["path"], + ) + rpool.set_data( + "derivatives-dir", ingress, "outputspec.data", {}, "", "outdir_config_ingress" + ) for subdir in [outdir_anat, outdir_func]: for filepath in subdir: filename = str(filepath) for ext in exts: - filename = filename.split("/")[-1].replace(ext, '') + filename = filename.split("/")[-1].replace(ext, "") - data_label = filename.split(unique_id)[1].lstrip('_') + data_label = filename.split(unique_id)[1].lstrip("_") if len(filename) == len(data_label): - raise Exception('\n\n[!] Possibly wrong participant or ' - 'session in this directory?\n\n' - f'Filepath: {filepath}\n\n') + raise Exception( + "\n\n[!] Possibly wrong participant or " + "session in this directory?\n\n" + f"Filepath: {filepath}\n\n" + ) - bidstag = '' - for tag in data_label.split('_'): - for prefix in ['task-', 'run-', 'acq-', 'rec']: + bidstag = "" + for tag in data_label.split("_"): + for prefix in ["task-", "run-", "acq-", "rec"]: if tag.startswith(prefix): - bidstag += f'{tag}_' - data_label = data_label.replace(f'{tag}_', '') + bidstag += f"{tag}_" + data_label = data_label.replace(f"{tag}_", "") data_label, json = strip_template(data_label, dir_path, filename) - rpool, json_info, pipe_idx, node_name, data_label = \ - json_outdir_ingress(rpool, filepath, \ - exts, data_label, json) + rpool, json_info, pipe_idx, node_name, data_label = json_outdir_ingress( + rpool, filepath, exts, data_label, json + ) - if ('template' in data_label and not json_info['Template'] == \ - cfg.pipeline_setup['outdir_ingress']['Template']): + if ( + "template" in data_label + and not json_info["Template"] + == cfg.pipeline_setup["outdir_ingress"]["Template"] + ): continue # Rename confounds to avoid confusion in nuisance regression - if data_label.endswith('desc-confounds_timeseries'): - data_label = 'pipeline-ingress_desc-confounds_timeseries' + if data_label.endswith("desc-confounds_timeseries"): + data_label = "pipeline-ingress_desc-confounds_timeseries" if len(bidstag) > 1: # Remove tail symbol bidstag = bidstag[:-1] - if bidstag.startswith('task-'): - bidstag = bidstag.replace('task-', '') + if bidstag.startswith("task-"): + bidstag = bidstag.replace("task-", "") # Rename bold mask for CPAC naming convention # and to avoid collision with anat brain mask - if data_label.endswith('desc-brain_mask') and filepath in outdir_func: - data_label = data_label.replace('brain_mask', 'bold_mask') + if data_label.endswith("desc-brain_mask") and filepath in outdir_func: + data_label = data_label.replace("brain_mask", "bold_mask") try: pipe_x = rpool.get_pipe_number(pipe_idx) except ValueError: pipe_x = len(rpool.pipe_list) if filepath in outdir_anat: - ingress = 
create_general_datasource(f'gather_anat_outdir_{str(data_label)}_{pipe_x}') + ingress = create_general_datasource( + f"gather_anat_outdir_{data_label!s}_{pipe_x}" + ) ingress.inputs.inputnode.set( unique_id=unique_id, data=filepath, creds_path=creds_path, - dl_dir=cfg.pipeline_setup['working_directory']['path'] + dl_dir=cfg.pipeline_setup["working_directory"]["path"], + ) + rpool.set_data( + data_label, + ingress, + "outputspec.data", + json_info, + pipe_idx, + node_name, + f"outdir_{data_label}_ingress", + inject=True, ) - rpool.set_data(data_label, ingress, 'outputspec.data', json_info, - pipe_idx, node_name, f"outdir_{data_label}_ingress", inject=True) else: - if data_label.endswith('desc-preproc_bold'): + if data_label.endswith("desc-preproc_bold"): func_key = data_label func_dict[bidstag] = {} - func_dict[bidstag]['scan'] = str(filepath) - func_dict[bidstag]['scan_parameters'] = json_info - func_dict[bidstag]['pipe_idx'] = pipe_idx - if data_label.endswith('desc-brain_mask'): - data_label = data_label.replace('brain_mask', 'bold_mask') + func_dict[bidstag]["scan"] = str(filepath) + func_dict[bidstag]["scan_parameters"] = json_info + func_dict[bidstag]["pipe_idx"] = pipe_idx + if data_label.endswith("desc-brain_mask"): + data_label = data_label.replace("brain_mask", "bold_mask") try: func_paths[data_label].append(filepath) except: @@ -1996,166 +2222,193 @@ def ingress_output_dir(wf, cfg, rpool, unique_id, data_paths, part_id, ses_id, c func_paths[data_label].append(filepath) if func_dict: - wf, rpool = func_outdir_ingress(wf, cfg, func_dict, rpool, unique_id, \ - creds_path, part_id, func_key, func_paths) + wf, rpool = func_outdir_ingress( + wf, + cfg, + func_dict, + rpool, + unique_id, + creds_path, + part_id, + func_key, + func_paths, + ) - if cfg.surface_analysis['freesurfer']['ingress_reconall']: - rpool = ingress_freesurfer(wf, rpool, cfg, data_paths, unique_id, part_id, - ses_id) + if cfg.surface_analysis["freesurfer"]["ingress_reconall"]: + rpool = ingress_freesurfer( + wf, rpool, cfg, data_paths, unique_id, part_id, ses_id + ) return wf, rpool + def json_outdir_ingress(rpool, filepath, exts, data_label, json): - desc_val = None - for tag in data_label.split('_'): - if 'desc-' in tag: + for tag in data_label.split("_"): + if "desc-" in tag: desc_val = tag break jsonpath = str(filepath) for ext in exts: - jsonpath = jsonpath.replace(ext, '') + jsonpath = jsonpath.replace(ext, "") jsonpath = f"{jsonpath}.json" if not os.path.exists(jsonpath): - print(f'\n\n[!] No JSON found for file {filepath}.\nCreating ' - f'{jsonpath}..\n\n') + print( + f"\n\n[!] No JSON found for file {filepath}.\nCreating " f"{jsonpath}..\n\n" + ) json_info = { - 'Description': 'This data was generated elsewhere and ' - 'supplied by the user into this C-PAC run\'s ' - 'output directory. This JSON file was ' - 'automatically generated by C-PAC because a ' - 'JSON file was not supplied with the data.' + "Description": "This data was generated elsewhere and " + "supplied by the user into this C-PAC run's " + "output directory. This JSON file was " + "automatically generated by C-PAC because a " + "JSON file was not supplied with the data." } json_info = {**json_info, **json} write_output_json(json_info, jsonpath) else: json_info = read_json(jsonpath) json_info = {**json_info, **json} - if 'CpacProvenance' in json_info: + if "CpacProvenance" in json_info: if desc_val: # it's a C-PAC output, let's check for pipe_idx/strat integer # suffixes in the desc- entries. 
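+            # A hedged illustration (hypothetical label, not taken from any
+            # particular run): an output written out as "desc-preproc2_bold"
+            # carries the strategy suffix "2" on its desc- entry; the loop
+            # below strips that trailing digit so the label becomes
+            # "desc-preproc_bold", and the unique pipe_idx is then recovered
+            # from the CpacProvenance entry instead of the suffix.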
only_desc = str(desc_val) - + if only_desc[-1].isdigit(): for idx in range(0, 3): # let's stop at 3, please don't run >999 strategies okay? if only_desc[-1].isdigit(): only_desc = only_desc[:-1] - - if only_desc[-1] == '-': - only_desc = only_desc.rstrip('-') - else: - raise Exception('\n[!] Something went wrong with either ' - 'reading in the output directory or when ' - 'it was written out previously.\n\nGive ' - 'this to your friendly local C-PAC ' - f'developer:\n\n{str(data_label)}\n') - # remove the integer at the end of the desc-* variant, we will + if only_desc[-1] == "-": + only_desc = only_desc.rstrip("-") + else: + raise Exception( + "\n[!] Something went wrong with either " + "reading in the output directory or when " + "it was written out previously.\n\nGive " + "this to your friendly local C-PAC " + f"developer:\n\n{data_label!s}\n" + ) + + # remove the integer at the end of the desc-* variant, we will # get the unique pipe_idx from the CpacProvenance below data_label = data_label.replace(desc_val, only_desc) # preserve cpac provenance/pipe_idx - pipe_idx = rpool.generate_prov_string(json_info['CpacProvenance']) + pipe_idx = rpool.generate_prov_string(json_info["CpacProvenance"]) node_name = "" - + else: - json_info['CpacProvenance'] = [f'{data_label}:Non-C-PAC Origin: {filepath}'] - if not 'Description' in json_info: - json_info['Description'] = 'This data was generated elsewhere and ' \ - 'supplied by the user into this C-PAC run\'s '\ - 'output directory. This JSON file was '\ - 'automatically generated by C-PAC because a '\ - 'JSON file was not supplied with the data.' - pipe_idx = rpool.generate_prov_string(json_info['CpacProvenance']) + json_info["CpacProvenance"] = [f"{data_label}:Non-C-PAC Origin: {filepath}"] + if "Description" not in json_info: + json_info["Description"] = ( + "This data was generated elsewhere and " + "supplied by the user into this C-PAC run's " + "output directory. This JSON file was " + "automatically generated by C-PAC because a " + "JSON file was not supplied with the data." 
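+            # (A file without C-PAC provenance also receives the synthetic
+            # CpacProvenance entry set at the top of this branch, of the form
+            # "<data_label>:Non-C-PAC Origin: <filepath>", so a hypothetical
+            # externally-supplied "desc-preproc_bold" file can still be
+            # tracked in the resource pool.)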
+        )
+        pipe_idx = rpool.generate_prov_string(json_info["CpacProvenance"])
         node_name = f"{data_label}_ingress"
 
     return rpool, json_info, pipe_idx, node_name, data_label
 
 
-def func_outdir_ingress(wf, cfg, func_dict, rpool, unique_id, creds_path, part_id, key, \
-                        func_paths):
+
+def func_outdir_ingress(
+    wf, cfg, func_dict, rpool, unique_id, creds_path, part_id, key, func_paths
+):
     pipe_x = len(rpool.pipe_list)
-    exts = ['.nii', '.gz', '.mat', '.1D', '.txt', '.csv', '.rms', '.tsv']
-    ingress = create_func_datasource(func_dict, rpool, f'gather_func_outdir_{key}_{pipe_x}')
+    exts = [".nii", ".gz", ".mat", ".1D", ".txt", ".csv", ".rms", ".tsv"]
+    ingress = create_func_datasource(
+        func_dict, rpool, f"gather_func_outdir_{key}_{pipe_x}"
+    )
     ingress.inputs.inputnode.set(
         subject=unique_id,
         creds_path=creds_path,
-        dl_dir=cfg.pipeline_setup['working_directory']['path']
+        dl_dir=cfg.pipeline_setup["working_directory"]["path"],
     )
-    rpool.set_data('subject', ingress, 'outputspec.subject', {}, "",
-                   "func_ingress")
-    ingress.get_node('inputnode').iterables = \
-        ("scan", list(func_dict.keys()))
-    rpool.set_data(key, ingress, 'outputspec.rest', {}, "",
-                   "func_ingress")
-
-    rpool.set_data('scan', ingress, 'outputspec.scan', {}, "", 'func_ingress')
-    rpool.set_data('scan-params', ingress, 'outputspec.scan_params', {}, "",
-                   "scan_params_ingress")
-    wf, rpool, diff, blip, fmap_rp_list = ingress_func_metadata(wf, cfg, \
-        rpool, func_dict, part_id, creds_path, key)
-
+    rpool.set_data("subject", ingress, "outputspec.subject", {}, "", "func_ingress")
+    ingress.get_node("inputnode").iterables = ("scan", list(func_dict.keys()))
+    rpool.set_data(key, ingress, "outputspec.rest", {}, "", "func_ingress")
+
+    rpool.set_data("scan", ingress, "outputspec.scan", {}, "", "func_ingress")
+    rpool.set_data(
+        "scan-params", ingress, "outputspec.scan_params", {}, "", "scan_params_ingress"
+    )
+    wf, rpool, diff, blip, fmap_rp_list = ingress_func_metadata(
+        wf, cfg, rpool, func_dict, part_id, creds_path, key
+    )
+
     # Have to do it this weird way to save the parsed BIDS tag & filepath
-    mask_paths_key = 'desc-bold_mask' if 'desc-bold_mask' in func_paths else \
-        'space-template_desc-bold_mask'
-    ts_paths_key = 'pipeline-ingress_desc-confounds_timeseries'
+    mask_paths_key = (
+        "desc-bold_mask"
+        if "desc-bold_mask" in func_paths
+        else "space-template_desc-bold_mask"
+    )
+    ts_paths_key = "pipeline-ingress_desc-confounds_timeseries"
 
     # Connect func data with appropriate scan name
-    iterables = pe.Node(Function(input_names=['scan',
-                                              'mask_paths',
-                                              'ts_paths'],
-                                 output_names=['out_scan',
-                                               'mask',
-                                               'confounds'],
-                                 function=set_iterables),
-                        name=f'set_iterables_{pipe_x}')
+    iterables = pe.Node(
+        Function(
+            input_names=["scan", "mask_paths", "ts_paths"],
+            output_names=["out_scan", "mask", "confounds"],
+            function=set_iterables,
+        ),
+        name=f"set_iterables_{pipe_x}",
+    )
     iterables.inputs.mask_paths = func_paths[mask_paths_key]
     iterables.inputs.ts_paths = func_paths[ts_paths_key]
-    wf.connect(ingress, 'outputspec.scan', iterables, 'scan')
+    wf.connect(ingress, "outputspec.scan", iterables, "scan")
 
     for key in func_paths:
         if key == mask_paths_key or key == ts_paths_key:
-            ingress_func = create_general_datasource(f'ingress_func_data_{key}')
+            ingress_func = create_general_datasource(f"ingress_func_data_{key}")
             ingress_func.inputs.inputnode.set(
                 unique_id=unique_id,
                 creds_path=creds_path,
-                dl_dir=cfg.pipeline_setup['working_directory']['path'])
-            wf.connect(iterables, 'out_scan', ingress_func, 'inputnode.scan')
+
dl_dir=cfg.pipeline_setup["working_directory"]["path"], + ) + wf.connect(iterables, "out_scan", ingress_func, "inputnode.scan") if key == mask_paths_key: - wf.connect(iterables, 'mask', ingress_func, 'inputnode.data') - rpool.set_data(key, ingress_func, 'inputnode.data', {}, "", f"outdir_{key}_ingress") + wf.connect(iterables, "mask", ingress_func, "inputnode.data") + rpool.set_data( + key, ingress_func, "inputnode.data", {}, "", f"outdir_{key}_ingress" + ) elif key == ts_paths_key: - wf.connect(iterables, 'confounds', ingress_func, 'inputnode.data') - rpool.set_data(key, ingress_func, 'inputnode.data', {}, "", f"outdir_{key}_ingress") + wf.connect(iterables, "confounds", ingress_func, "inputnode.data") + rpool.set_data( + key, ingress_func, "inputnode.data", {}, "", f"outdir_{key}_ingress" + ) return wf, rpool + def set_iterables(scan, mask_paths=None, ts_paths=None): - # match scan with filepath to get filepath mask_path = [path for path in mask_paths if scan in path] ts_path = [path for path in ts_paths if scan in path] - return (scan, mask_path[0], ts_path[0]) + return (scan, mask_path[0], ts_path[0]) + def strip_template(data_label, dir_path, filename): - json = {} - # rename to template - for prefix in ['space-', 'from-', 'to-']: - for bidstag in data_label.split('_'): + # rename to template + for prefix in ["space-", "from-", "to-"]: + for bidstag in data_label.split("_"): if bidstag.startswith(prefix): - template_key, template_val = bidstag.split('-') + template_key, template_val = bidstag.split("-") template_name, _template_desc = lookup_identifier(template_val) if template_name: - json['Template'] = template_val - data_label = data_label.replace(template_val, 'template') - elif bidstag.startswith('res-'): - res_key, res_val = bidstag.split('-') - json['Resolution'] = res_val - data_label = data_label.replace(bidstag, '') - if data_label.find('__'): data_label = data_label.replace('__', '_') + json["Template"] = template_val + data_label = data_label.replace(template_val, "template") + elif bidstag.startswith("res-"): + res_key, res_val = bidstag.split("-") + json["Resolution"] = res_val + data_label = data_label.replace(bidstag, "") + if data_label.find("__"): + data_label = data_label.replace("__", "_") return data_label, json @@ -2163,18 +2416,16 @@ def ingress_pipeconfig_paths(cfg, rpool, unique_id, creds_path=None): # ingress config file paths # TODO: may want to change the resource keys for each to include one level up in the YAML as well - import pkg_resources as p import pandas as pd - import ast + import pkg_resources as p - template_csv = p.resource_filename('CPAC', 'resources/cpac_templates.csv') + template_csv = p.resource_filename("CPAC", "resources/cpac_templates.csv") template_df = pd.read_csv(template_csv, keep_default_na=False) - + for row in template_df.itertuples(): - key = row.Key val = row.Pipeline_Config_Entry - val = cfg.get_nested(cfg, [x.lstrip() for x in val.split(',')]) + val = cfg.get_nested(cfg, [x.lstrip() for x in val.split(",")]) resolution = row.Intended_Resolution_Config_Entry desc = row.Description @@ -2182,72 +2433,95 @@ def ingress_pipeconfig_paths(cfg, rpool, unique_id, creds_path=None): continue if resolution: - res_keys = [x.lstrip() for x in resolution.split(',')] + res_keys = [x.lstrip() for x in resolution.split(",")] tag = res_keys[-1] - json_info = {} - - if '$FSLDIR' in val: - val = val.replace('$FSLDIR', cfg.pipeline_setup[ - 'system_config']['FSLDIR']) - if '$priors_path' in val: - priors_path = 
cfg.segmentation['tissue_segmentation']['FSL-FAST']['use_priors']['priors_path'] or '' - if '$FSLDIR' in priors_path: - priors_path = priors_path.replace('$FSLDIR', cfg.pipeline_setup['system_config']['FSLDIR']) - val = val.replace('$priors_path', priors_path) - if '${resolution_for_anat}' in val: - val = val.replace('${resolution_for_anat}', cfg.registration_workflows['anatomical_registration']['resolution_for_anat']) - if '${func_resolution}' in val: - val = val.replace('${func_resolution}', cfg.registration_workflows[ - 'functional_registration']['func_registration_to_template'][ - 'output_resolution'][tag]) + json_info = {} + + if "$FSLDIR" in val: + val = val.replace("$FSLDIR", cfg.pipeline_setup["system_config"]["FSLDIR"]) + if "$priors_path" in val: + priors_path = ( + cfg.segmentation["tissue_segmentation"]["FSL-FAST"]["use_priors"][ + "priors_path" + ] + or "" + ) + if "$FSLDIR" in priors_path: + priors_path = priors_path.replace( + "$FSLDIR", cfg.pipeline_setup["system_config"]["FSLDIR"] + ) + val = val.replace("$priors_path", priors_path) + if "${resolution_for_anat}" in val: + val = val.replace( + "${resolution_for_anat}", + cfg.registration_workflows["anatomical_registration"][ + "resolution_for_anat" + ], + ) + if "${func_resolution}" in val: + val = val.replace( + "${func_resolution}", + cfg.registration_workflows["functional_registration"][ + "func_registration_to_template" + ]["output_resolution"][tag], + ) if desc: template_name, _template_desc = lookup_identifier(val) if template_name: desc = f"{template_name} - {desc}" - json_info['Description'] = f"{desc} - {val}" + json_info["Description"] = f"{desc} - {val}" if resolution: resolution = cfg.get_nested(cfg, res_keys) - json_info['Resolution'] = resolution - - resampled_template = pe.Node(Function(input_names=['resolution', - 'template', - 'template_name', - 'tag'], - output_names=['resampled_template'], - function=resolve_resolution, - as_module=True), - name='resampled_' + key) + json_info["Resolution"] = resolution + + resampled_template = pe.Node( + Function( + input_names=["resolution", "template", "template_name", "tag"], + output_names=["resampled_template"], + function=resolve_resolution, + as_module=True, + ), + name="resampled_" + key, + ) resampled_template.inputs.resolution = resolution resampled_template.inputs.template = val resampled_template.inputs.template_name = key resampled_template.inputs.tag = tag - + # the set_data below is set up a little differently, because we are # injecting and also over-writing already-existing entries # other alternative would have been to ingress into the # resampled_template node from the already existing entries, but we # didn't do that here - rpool.set_data(key, - resampled_template, - 'resampled_template', - json_info, "", - "template_resample") #, inject=True) # pipe_idx (after the blank json {}) should be the previous strat that you want deleted! 
because you're not connecting this the regular way, you have to do it manually - - else: - if val: - config_ingress = create_general_datasource(f'gather_{key}') - config_ingress.inputs.inputnode.set( - unique_id=unique_id, - data=val, - creds_path=creds_path, - dl_dir=cfg.pipeline_setup['working_directory']['path'] - ) - rpool.set_data(key, config_ingress, 'outputspec.data', - json_info, "", f"{key}_config_ingress") + rpool.set_data( + key, + resampled_template, + "resampled_template", + json_info, + "", + "template_resample", + ) # , inject=True) # pipe_idx (after the blank json {}) should be the previous strat that you want deleted! because you're not connecting this the regular way, you have to do it manually + + elif val: + config_ingress = create_general_datasource(f"gather_{key}") + config_ingress.inputs.inputnode.set( + unique_id=unique_id, + data=val, + creds_path=creds_path, + dl_dir=cfg.pipeline_setup["working_directory"]["path"], + ) + rpool.set_data( + key, + config_ingress, + "outputspec.data", + json_info, + "", + f"{key}_config_ingress", + ) # templates, resampling from config - ''' + """ template_keys = [ ("anat", ["network_centrality", "template_specification_file"]), ("anat", ["nuisance_corrections", "2-nuisance_regression", @@ -2330,13 +2604,13 @@ def _set_nested(attr, keys): map_node=True ) cfg.set_nested(cfg, key, node) - ''' + """ return rpool def initiate_rpool(wf, cfg, data_paths=None, part_id=None): - ''' + """ data_paths format: {'anat': { @@ -2355,21 +2629,20 @@ def initiate_rpool(wf, cfg, data_paths=None, part_id=None): 'subject_id': 'sub-01', 'unique_id': 'ses-1', 'derivatives_dir': '{derivatives_dir path}'} - ''' - + """ # TODO: refactor further, integrate with the ingress_data functionality # TODO: used for BIDS-Derivatives (below), and possible refactoring of # TODO: the raw data config to use 'T1w' label instead of 'anat' etc. 
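+    # A minimal sketch of the `data_paths` argument (hypothetical paths and
+    # values, shown only to illustrate the format documented in the docstring
+    # above; the scan_parameters contents are an assumed BIDS-style field):
+    # data_paths = {
+    #     "anat": {"T1w": "/data/sub-01/ses-1/anat/sub-01_ses-1_T1w.nii.gz"},
+    #     "creds_path": None,
+    #     "func": {
+    #         "task-rest_run-1": {
+    #             "scan": "/data/sub-01/ses-1/func/"
+    #                     "sub-01_ses-1_task-rest_run-1_bold.nii.gz",
+    #             "scan_parameters": {"RepetitionTime": 2.0},
+    #         }
+    #     },
+    #     "site_id": "site-1",
+    #     "subject_id": "sub-01",
+    #     "unique_id": "ses-1",
+    #     "derivatives_dir": "/outputs/sub-01_ses-1",
+    # }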
if data_paths: - part_id = data_paths['subject_id'] - ses_id = data_paths['unique_id'] - if 'creds_path' not in data_paths: + part_id = data_paths["subject_id"] + ses_id = data_paths["unique_id"] + if "creds_path" not in data_paths: creds_path = None else: - creds_path = data_paths['creds_path'] - unique_id = f'{part_id}_{ses_id}' - + creds_path = data_paths["creds_path"] + unique_id = f"{part_id}_{ses_id}" + elif part_id: unique_id = part_id creds_path = None @@ -2378,18 +2651,29 @@ def initiate_rpool(wf, cfg, data_paths=None, part_id=None): if data_paths: # ingress outdir - try: - if data_paths['derivatives_dir'] and cfg.pipeline_setup['outdir_ingress']['run']: - wf, rpool = \ - ingress_output_dir(wf, cfg, rpool, unique_id, data_paths, part_id, \ - ses_id, creds_path=None) + try: + if ( + data_paths["derivatives_dir"] + and cfg.pipeline_setup["outdir_ingress"]["run"] + ): + wf, rpool = ingress_output_dir( + wf, + cfg, + rpool, + unique_id, + data_paths, + part_id, + ses_id, + creds_path=None, + ) except: - rpool = ingress_raw_anat_data(wf, rpool, cfg, data_paths, unique_id, - part_id, ses_id) - if 'func' in data_paths: - wf, rpool, diff, blip, fmap_rp_list = \ - ingress_raw_func_data(wf, rpool, cfg, data_paths, unique_id, - part_id, ses_id) + rpool = ingress_raw_anat_data( + wf, rpool, cfg, data_paths, unique_id, part_id, ses_id + ) + if "func" in data_paths: + wf, rpool, diff, blip, fmap_rp_list = ingress_raw_func_data( + wf, rpool, cfg, data_paths, unique_id, part_id, ses_id + ) # grab any file paths from the pipeline config YAML rpool = ingress_pipeconfig_paths(cfg, rpool, unique_id, creds_path) @@ -2401,45 +2685,42 @@ def initiate_rpool(wf, cfg, data_paths=None, part_id=None): def run_node_blocks(blocks, data_paths, cfg=None): import os + from CPAC.pipeline import nipype_pipeline_engine as pe from CPAC.pipeline.engine import NodeBlock if not cfg: cfg = { - 'pipeline_setup': { - 'working_directory': { - 'path': os.getcwd() - }, - 'log_directory': { - 'path': os.getcwd() - } + "pipeline_setup": { + "working_directory": {"path": os.getcwd()}, + "log_directory": {"path": os.getcwd()}, } } # TODO: WE HAVE TO PARSE OVER UNIQUE ID'S!!! _, rpool = initiate_rpool(cfg, data_paths) - wf = pe.Workflow(name='node_blocks') - wf.base_dir = cfg.pipeline_setup['working_directory']['path'] - wf.config['execution'] = { - 'hash_method': 'timestamp', - 'crashdump_dir': cfg.pipeline_setup['log_directory']['path'] + wf = pe.Workflow(name="node_blocks") + wf.base_dir = cfg.pipeline_setup["working_directory"]["path"] + wf.config["execution"] = { + "hash_method": "timestamp", + "crashdump_dir": cfg.pipeline_setup["log_directory"]["path"], } run_blocks = [] - if rpool.check_rpool('desc-preproc_T1w'): + if rpool.check_rpool("desc-preproc_T1w"): print("Preprocessed T1w found, skipping anatomical preprocessing.") else: run_blocks += blocks[0] - if rpool.check_rpool('desc-preproc_bold'): + if rpool.check_rpool("desc-preproc_bold"): print("Preprocessed BOLD found, skipping functional preprocessing.") else: run_blocks += blocks[1] for block in run_blocks: - wf = NodeBlock(block, debug=cfg['pipeline_setup', 'Debugging', - 'verbose']).connect_block( - wf, cfg, rpool) + wf = NodeBlock( + block, debug=cfg["pipeline_setup", "Debugging", "verbose"] + ).connect_block(wf, cfg, rpool) rpool.gather_pipes(wf, cfg) wf.run() @@ -2477,6 +2758,7 @@ class NodeData: ... print(str(lookup_error).strip().split('\n')[0].strip()) [!] 
C-PAC says: None of the listed resources are in the resource pool: """ + # pylint: disable=too-few-public-methods def __init__(self, strat_pool=None, resource=None, **kwargs): self.node = NotImplemented diff --git a/CPAC/pipeline/schema.py b/CPAC/pipeline/schema.py index 9f903f1cad..eb83a9107f 100644 --- a/CPAC/pipeline/schema.py +++ b/CPAC/pipeline/schema.py @@ -16,35 +16,55 @@ # License along with C-PAC. If not, see . """Validation schema for C-PAC pipeline configurations""" + # pylint: disable=too-many-lines -import re from itertools import chain, permutations +import re +from subprocess import CalledProcessError + import numpy as np from pathvalidate import sanitize_filename -from subprocess import CalledProcessError -from voluptuous import All, ALLOW_EXTRA, Any, BooleanInvalid, Capitalize, \ - Coerce, CoerceInvalid, ExclusiveInvalid, In, Length, \ - LengthInvalid, Lower, Match, Maybe, MultipleInvalid, \ - Optional, Range, Required, Schema, Title +from voluptuous import ( + All, + ALLOW_EXTRA, + Any, + BooleanInvalid, + Capitalize, + Coerce, + CoerceInvalid, + ExclusiveInvalid, + In, + Length, + LengthInvalid, + Lower, + Match, + Maybe, + MultipleInvalid, + Optional, + Range, + Required, + Schema, + Title, +) + from CPAC.utils.datatypes import ItemFromList, ListFromItem from CPAC.utils.docs import DOCS_URL_PREFIX from CPAC.utils.utils import YAML_BOOLS # 1 or more digits, optional decimal, 'e', optional '-', 1 or more digits -SCIENTIFIC_NOTATION_STR_REGEX = r'^([0-9]+(\.[0-9]*)*(e)-{0,1}[0-9]+)*$' +SCIENTIFIC_NOTATION_STR_REGEX = r"^([0-9]+(\.[0-9]*)*(e)-{0,1}[0-9]+)*$" # (1 or more digits, optional decimal, 0 or more lowercase characters (units)) # ('x', # 1 or more digits, optional decimal, 0 or more lowercase characters (units) # ) 0 or more times -RESOLUTION_REGEX = r'^[0-9]+(\.[0-9]*){0,1}[a-z]*' \ - r'(x[0-9]+(\.[0-9]*){0,1}[a-z]*)*$' +RESOLUTION_REGEX = r"^[0-9]+(\.[0-9]*){0,1}[a-z]*" r"(x[0-9]+(\.[0-9]*){0,1}[a-z]*)*$" Number = Any(float, int, All(str, Match(SCIENTIFIC_NOTATION_STR_REGEX))) def str_to_bool1_1(x): # pylint: disable=invalid-name - '''Convert strings to Booleans for YAML1.1 syntax + """Convert strings to Booleans for YAML1.1 syntax Ref https://yaml.org/type/bool.html @@ -55,7 +75,7 @@ def str_to_bool1_1(x): # pylint: disable=invalid-name Returns ------- bool - ''' + """ if isinstance(x, str): try: x = float(x) @@ -63,201 +83,240 @@ def str_to_bool1_1(x): # pylint: disable=invalid-name return False except ValueError: pass - x = (True if str(x).lower() in YAML_BOOLS[True] else - False if str(x).lower() in YAML_BOOLS[False] else x) + x = ( + True + if str(x).lower() in YAML_BOOLS[True] + else False + if str(x).lower() in YAML_BOOLS[False] + else x + ) if not isinstance(x, (bool, int)): - raise BooleanInvalid('Type boolean value was expected, type ' - f'{getattr(type(x), "__name__", str(type(x)))} ' - f'value\n\n{x}\n\nwas provided') + raise BooleanInvalid( + 'Type boolean value was expected, type ' + f'{getattr(type(x), "__name__", str(type(x)))} ' + f'value\n\n{x}\n\nwas provided' + ) return bool(x) bool1_1 = All(str_to_bool1_1, bool) forkable = All(Coerce(ListFromItem), [bool1_1], Length(max=2)) valid_options = { - 'acpc': { - 'target': ['brain', 'whole-head'] - }, - 'brain_extraction': { - 'using': ['3dSkullStrip', 'BET', 'UNet', 'niworkflows-ants', - 'FreeSurfer-BET-Tight', 'FreeSurfer-BET-Loose', - 'FreeSurfer-ABCD', 'FreeSurfer-Brainmask'] + "acpc": {"target": ["brain", "whole-head"]}, + "brain_extraction": { + "using": [ + "3dSkullStrip", + "BET", + "UNet", + 
"niworkflows-ants", + "FreeSurfer-BET-Tight", + "FreeSurfer-BET-Loose", + "FreeSurfer-ABCD", + "FreeSurfer-Brainmask", + ] }, - 'centrality': { - 'method_options': ['degree_centrality', 'eigenvector_centrality', - 'local_functional_connectivity_density'], - 'threshold_options': ['Significance threshold', 'Sparsity threshold', - 'Correlation threshold'], - 'weight_options': ['Binarized', 'Weighted'] + "centrality": { + "method_options": [ + "degree_centrality", + "eigenvector_centrality", + "local_functional_connectivity_density", + ], + "threshold_options": [ + "Significance threshold", + "Sparsity threshold", + "Correlation threshold", + ], + "weight_options": ["Binarized", "Weighted"], }, - 'motion_correction': ['3dvolreg', 'mcflirt'], - 'sca': { - 'roi_paths': ['Avg', 'DualReg', 'MultReg'], + "motion_correction": ["3dvolreg", "mcflirt"], + "sca": { + "roi_paths": ["Avg", "DualReg", "MultReg"], }, - 'segmentation': { - 'using': ['FSL-FAST', 'ANTs_Prior_Based', 'Template_Based'], - 'template': ['EPI_Template', 'T1_Template'], + "segmentation": { + "using": ["FSL-FAST", "ANTs_Prior_Based", "Template_Based"], + "template": ["EPI_Template", "T1_Template"], }, - 'timeseries': { - 'roi_paths': ['Avg', 'Voxel', 'SpatialReg'], + "timeseries": { + "roi_paths": ["Avg", "Voxel", "SpatialReg"], }, - 'connectivity_matrix': { - 'using': ['AFNI', 'Nilearn', 'ndmg'], - 'measure': ['Pearson', 'Partial', 'Spearman', 'MGC', - # 'TangentEmbed' # "Skip tangent embedding for now" + "connectivity_matrix": { + "using": ["AFNI", "Nilearn", "ndmg"], + "measure": [ + "Pearson", + "Partial", + "Spearman", + "MGC", + # 'TangentEmbed' # "Skip tangent embedding for now" ], }, - 'Regressors': { - 'CompCor': { - 'degree': int, - 'erode_mask_mm': bool1_1, - 'summary': { - 'method': str, - 'components': int, - 'filter': str, + "Regressors": { + "CompCor": { + "degree": int, + "erode_mask_mm": bool1_1, + "summary": { + "method": str, + "components": int, + "filter": str, }, - 'threshold': str, - 'tissues': [str], - 'extraction_resolution': int + "threshold": str, + "tissues": [str], + "extraction_resolution": int, }, - 'segmentation': { - 'erode_mask': bool1_1, - 'extraction_resolution': Any( - int, float, 'Functional', All(str, Match(RESOLUTION_REGEX)) - ), - 'include_delayed': bool1_1, - 'include_delayed_squared': bool1_1, - 'include_squared': bool1_1, - 'summary': Any( - str, {'components': int, 'method': str} + "segmentation": { + "erode_mask": bool1_1, + "extraction_resolution": Any( + int, float, "Functional", All(str, Match(RESOLUTION_REGEX)) ), + "include_delayed": bool1_1, + "include_delayed_squared": bool1_1, + "include_squared": bool1_1, + "summary": Any(str, {"components": int, "method": str}), }, }, - 'target_space': ['Native', 'Template'] + "target_space": ["Native", "Template"], } -valid_options['space'] = list({option.lower() for option in - valid_options['target_space']}) +valid_options["space"] = list( + {option.lower() for option in valid_options["target_space"]} +) mutex = { # mutually exclusive booleans - 'FSL-BET': { + "FSL-BET": { # exactly zero or one of each of the following can be True for FSL-BET - 'mutex': ['reduce_bias', 'robust', 'padding', 'remove_eyes', - 'surfaces'], + "mutex": ["reduce_bias", "robust", "padding", "remove_eyes", "surfaces"], # the remaining keys: validators for FSL-BET - 'rem': { - 'frac': float, - 'mesh_boolean': bool1_1, - 'outline': bool1_1, - 'radius': int, - 'skull': bool1_1, - 'threshold': bool1_1, - 'vertical_gradient': Range(min=-1, max=1, min_included=False, - 
max_included=False), - 'functional_mean_thr': { - 'run': bool1_1, - 'threshold_value': Maybe(int), + "rem": { + "frac": float, + "mesh_boolean": bool1_1, + "outline": bool1_1, + "radius": int, + "skull": bool1_1, + "threshold": bool1_1, + "vertical_gradient": Range( + min=-1, max=1, min_included=False, max_included=False + ), + "functional_mean_thr": { + "run": bool1_1, + "threshold_value": Maybe(int), }, - 'functional_mean_bias_correction': bool1_1, - } + "functional_mean_bias_correction": bool1_1, + }, } } ANTs_parameter_transforms = { - 'gradientStep': Number, - 'metric': { - 'type': str, - 'metricWeight': int, - 'numberOfBins': int, - 'samplingStrategy': str, - 'samplingPercentage': Number, - 'radius': Number, + "gradientStep": Number, + "metric": { + "type": str, + "metricWeight": int, + "numberOfBins": int, + "samplingStrategy": str, + "samplingPercentage": Number, + "radius": Number, }, - 'convergence': { - 'iteration': All(str, Match(RESOLUTION_REGEX)), - 'convergenceThreshold': Number, - 'convergenceWindowSize': int, + "convergence": { + "iteration": All(str, Match(RESOLUTION_REGEX)), + "convergenceThreshold": Number, + "convergenceWindowSize": int, }, - 'smoothing-sigmas': All(str, Match(RESOLUTION_REGEX)), - 'shrink-factors': All(str, Match(RESOLUTION_REGEX)), - 'use-histogram-matching': bool1_1, - 'updateFieldVarianceInVoxelSpace': Number, - 'totalFieldVarianceInVoxelSpace': Number, - 'winsorize-image-intensities': { - 'lowerQuantile': float, - 'upperQuantile': float, + "smoothing-sigmas": All(str, Match(RESOLUTION_REGEX)), + "shrink-factors": All(str, Match(RESOLUTION_REGEX)), + "use-histogram-matching": bool1_1, + "updateFieldVarianceInVoxelSpace": Number, + "totalFieldVarianceInVoxelSpace": Number, + "winsorize-image-intensities": { + "lowerQuantile": float, + "upperQuantile": float, }, } -ANTs_parameters = [Any( - { - 'collapse-output-transforms': int - }, { - 'dimensionality': int - }, { - 'initial-moving-transform': { - 'initializationFeature': int, +ANTs_parameters = [ + Any( + {"collapse-output-transforms": int}, + {"dimensionality": int}, + { + "initial-moving-transform": { + "initializationFeature": int, + }, }, - }, { - 'transforms': [Any({ - 'Rigid': ANTs_parameter_transforms, - }, { - 'Affine': ANTs_parameter_transforms, - }, { - 'SyN': ANTs_parameter_transforms, - })], - }, { - 'verbose': Any(Coerce(int), In({0, 1})), - }, { - 'float': Any(Coerce(int), In({0, 1})), - }, { - 'masks': { - 'fixed_image_mask': bool1_1, - 'moving_image_mask': bool1_1, + { + "transforms": [ + Any( + { + "Rigid": ANTs_parameter_transforms, + }, + { + "Affine": ANTs_parameter_transforms, + }, + { + "SyN": ANTs_parameter_transforms, + }, + ) + ], + }, + { + "verbose": Any(Coerce(int), In({0, 1})), + }, + { + "float": Any(Coerce(int), In({0, 1})), + }, + { + "masks": { + "fixed_image_mask": bool1_1, + "moving_image_mask": bool1_1, + }, }, - }, dict # TODO: specify other valid ANTs parameters -)] -motion_estimate_filter = Any({ # notch filter with breathing_rate_* set - Required('filter_type'): 'notch', - Required('filter_order'): int, - Required('breathing_rate_min'): Number, - 'breathing_rate_max': Number, - 'center_frequency': Maybe(Number), - 'filter_bandwidth': Maybe(Number), - 'lowpass_cutoff': Maybe(Number), - 'Name': Maybe(str) - }, { # notch filter with manual parameters set - Required('filter_type'): 'notch', - Required('filter_order'): int, - 'breathing_rate_min': None, - 'breathing_rate_max': None, - Required('center_frequency'): Number, - Required('filter_bandwidth'): Number, - 
'lowpass_cutoff': Maybe(Number), - 'Name': Maybe(str) - }, { # lowpass filter with breathing_rate_min - Required('filter_type'): 'lowpass', - Required('filter_order'): int, - Required('breathing_rate_min'): Number, - 'breathing_rate_max': Maybe(Number), - 'center_frequency': Maybe(Number), - 'filter_bandwidth': Maybe(Number), - 'lowpass_cutoff': Maybe(Number), - 'Name': Maybe(str) - }, { # lowpass filter with lowpass_cutoff - Required('filter_type'): 'lowpass', - Required('filter_order'): int, - Required('breathing_rate_min', default=None): None, - 'breathing_rate_max': Maybe(Number), - 'center_frequency': Maybe(Number), - 'filter_bandwidth': Maybe(Number), - Required('lowpass_cutoff'): Number, - 'Name': Maybe(str)}, - msg='`motion_estimate_filter` configuration is invalid.\nSee ' - f'{DOCS_URL_PREFIX}/user/' - 'func#motion-estimate-filter-valid-options for details.\n') -target_space = All(Coerce(ListFromItem), - [All(Title, In(valid_options['target_space']))]) + dict, # TODO: specify other valid ANTs parameters + ) +] +motion_estimate_filter = Any( + { # notch filter with breathing_rate_* set + Required("filter_type"): "notch", + Required("filter_order"): int, + Required("breathing_rate_min"): Number, + "breathing_rate_max": Number, + "center_frequency": Maybe(Number), + "filter_bandwidth": Maybe(Number), + "lowpass_cutoff": Maybe(Number), + "Name": Maybe(str), + }, + { # notch filter with manual parameters set + Required("filter_type"): "notch", + Required("filter_order"): int, + "breathing_rate_min": None, + "breathing_rate_max": None, + Required("center_frequency"): Number, + Required("filter_bandwidth"): Number, + "lowpass_cutoff": Maybe(Number), + "Name": Maybe(str), + }, + { # lowpass filter with breathing_rate_min + Required("filter_type"): "lowpass", + Required("filter_order"): int, + Required("breathing_rate_min"): Number, + "breathing_rate_max": Maybe(Number), + "center_frequency": Maybe(Number), + "filter_bandwidth": Maybe(Number), + "lowpass_cutoff": Maybe(Number), + "Name": Maybe(str), + }, + { # lowpass filter with lowpass_cutoff + Required("filter_type"): "lowpass", + Required("filter_order"): int, + Required("breathing_rate_min", default=None): None, + "breathing_rate_max": Maybe(Number), + "center_frequency": Maybe(Number), + "filter_bandwidth": Maybe(Number), + Required("lowpass_cutoff"): Number, + "Name": Maybe(str), + }, + msg="`motion_estimate_filter` configuration is invalid.\nSee " + f"{DOCS_URL_PREFIX}/user/" + "func#motion-estimate-filter-valid-options for details.\n", +) +target_space = All( + Coerce(ListFromItem), [All(Title, In(valid_options["target_space"]))] +) def name_motion_filter(mfilter, mfilters=None): - '''Given a motion filter, create a short string for the filename + """Given a motion filter, create a short string for the filename Parameters ---------- @@ -290,40 +349,43 @@ def name_motion_filter(mfilter, mfilters=None): ... 'breathing_rate_min': 0.19}, [{'Name': 'lowpass2fl0p19'}, ... 
{'Name': 'lowpass2fl0p19dup1'}]) 'lowpass2fl0p19dup2' - ''' + """ if mfilters is None: mfilters = [] - if 'Name' in mfilter: - name = mfilter['Name'] + if "Name" in mfilter: + name = mfilter["Name"] else: - if mfilter['filter_type'] == 'notch': - if mfilter.get('breathing_rate_min'): - range_str = (f'fl{mfilter["breathing_rate_min"]}' - f'fu{mfilter["breathing_rate_max"]}') + if mfilter["filter_type"] == "notch": + if mfilter.get("breathing_rate_min"): + range_str = ( + f'fl{mfilter["breathing_rate_min"]}' + f'fu{mfilter["breathing_rate_max"]}' + ) else: - range_str = (f'fc{mfilter["center_frequency"]}' - f'bw{mfilter["filter_bandwidth"]}') + range_str = ( + f'fc{mfilter["center_frequency"]}' + f'bw{mfilter["filter_bandwidth"]}' + ) + elif mfilter.get("breathing_rate_min"): + range_str = f'fl{mfilter["breathing_rate_min"]}' else: - if mfilter.get('breathing_rate_min'): - range_str = f'fl{mfilter["breathing_rate_min"]}' - else: - range_str = f'fc{mfilter["lowpass_cutoff"]}' - range_str = range_str.replace('.', 'p') + range_str = f'fc{mfilter["lowpass_cutoff"]}' + range_str = range_str.replace(".", "p") name = f'{mfilter["filter_type"]}{mfilter["filter_order"]}{range_str}' - dupes = 'Name' not in mfilter and len([_ for _ in (_.get('Name', '') for - _ in mfilters) if - _.startswith(name)]) + dupes = "Name" not in mfilter and len( + [_ for _ in (_.get("Name", "") for _ in mfilters) if _.startswith(name)] + ) if dupes: - dup = re.search('(?=[A-Za-z0-9]*)(dup[0-9]*)', name) + dup = re.search("(?=[A-Za-z0-9]*)(dup[0-9]*)", name) if dup: # Don't chain 'dup' suffixes - name = name.replace(dup.group(), f'dup{dupes}') + name = name.replace(dup.group(), f"dup{dupes}") else: - name = f'{name}dup{dupes}' + name = f"{name}dup{dupes}" return name def permutation_message(key, options): - '''Function to give a clean, human-readable error message for keys + """Function to give a clean, human-readable error message for keys that accept permutation values Parameters @@ -334,8 +396,9 @@ def permutation_message(key, options): Returns ------- - msg: str''' # noqa: E501 - return f''' + msg: str + """ + return f""" \'{key}\' takes a dictionary with paths to region-of-interest (ROI) NIFTI files (.nii or .nii.gz) as keys and a comma separated string @@ -346,757 +409,852 @@ def permutation_message(key, options): Available analyses for \'{key}\' are {options} -''' +""" def sanitize(filename): - '''Sanitize a filename and replace whitespaces with underscores''' - return re.sub(r'\s+', '_', sanitize_filename(filename)) + """Sanitize a filename and replace whitespaces with underscores""" + return re.sub(r"\s+", "_", sanitize_filename(filename)) -latest_schema = Schema({ - 'FROM': Maybe(str), - 'skip env check': Maybe(bool), # flag for skipping an environment check - 'pipeline_setup': { - 'pipeline_name': All(str, Length(min=1), sanitize), - 'output_directory': { - 'path': str, - 'source_outputs_dir': Maybe(str), - 'pull_source_once': bool1_1, - 'write_func_outputs': bool1_1, - 'write_debugging_outputs': bool1_1, - 'output_tree': str, - 'quality_control': { - 'generate_quality_control_images': bool1_1, - 'generate_xcpqc_files': bool1_1, - }, - 'user_defined': Maybe(str), - }, - 'working_directory': { - 'path': str, - 'remove_working_dir': bool1_1, - }, - 'log_directory': { - 'run_logging': bool1_1, - 'path': str, - 'graphviz': { - 'entire_workflow': { - 'generate': bool, - 'graph2use': Maybe(All(Coerce(ListFromItem), - [All(Lower, - In(('orig', 'hierarchical', 'flat', - 'exec', 'colored')))])), - 'format': 
Maybe(All(Coerce(ListFromItem), - [All(Lower, In(('png', 'svg')))])), - 'simple_form': Maybe(bool)}}, - }, - 'crash_log_directory': { - 'path': Maybe(str), - }, - 'system_config': { - 'fail_fast': bool1_1, - 'FSLDIR': Maybe(str), - 'on_grid': { - 'run': bool1_1, - 'resource_manager': Maybe(str), - 'SGE': { - 'parallel_environment': Maybe(str), - 'queue': Maybe(str), +latest_schema = Schema( + { + "FROM": Maybe(str), + "skip env check": Maybe(bool), # flag for skipping an environment check + "pipeline_setup": { + "pipeline_name": All(str, Length(min=1), sanitize), + "output_directory": { + "path": str, + "source_outputs_dir": Maybe(str), + "pull_source_once": bool1_1, + "write_func_outputs": bool1_1, + "write_debugging_outputs": bool1_1, + "output_tree": str, + "quality_control": { + "generate_quality_control_images": bool1_1, + "generate_xcpqc_files": bool1_1, }, + "user_defined": Maybe(str), }, - 'maximum_memory_per_participant': Number, - 'raise_insufficient': bool1_1, - 'max_cores_per_participant': int, - 'num_ants_threads': int, - 'num_OMP_threads': int, - 'num_participants_at_once': int, - 'random_seed': Maybe(Any( - 'random', - All(int, Range(min=1, max=np.iinfo(np.int32).max)))), - 'observed_usage': { - 'callback_log': Maybe(str), - 'buffer': Number, - }, - }, - 'Amazon-AWS': { - 'aws_output_bucket_credentials': Maybe(str), - 's3_encryption': bool1_1, - }, - 'Debugging': { - 'verbose': bool1_1, - }, - 'freesurfer_dir': str, - 'outdir_ingress': { - 'run': bool1_1, - 'Template': Maybe(str), - }, - }, - 'anatomical_preproc': { - 'run': bool1_1, - 'run_t2': bool1_1, - 'non_local_means_filtering': { - 'run': forkable, - 'noise_model': Maybe(str), - }, - 'n4_bias_field_correction': { - 'run': forkable, - 'shrink_factor': int, - }, - 't1t2_bias_field_correction': Required( - # require 'T1w_brain_ACPC_template' if 'acpc_target' is 'brain' - Any({ - 'run': False, - 'BiasFieldSmoothingSigma': Maybe(int), - }, { - 'run': True, - 'BiasFieldSmoothingSigma': Maybe(int), - },), - ), - - 'acpc_alignment': Required( - # require 'T1w_brain_ACPC_template' and - # 'T2w_brain_ACPC_template' if 'acpc_target' is 'brain' - Any({ - 'run': False, - 'run_before_preproc': Maybe(bool1_1), - 'brain_size': Maybe(int), - 'FOV_crop': Maybe(In({'robustfov', 'flirt'})), - 'acpc_target': Maybe(In(valid_options['acpc']['target'])), - 'align_brain_mask': Maybe(bool1_1), - 'T1w_ACPC_template': Maybe(str), - 'T1w_brain_ACPC_template': Maybe(str), - 'T2w_ACPC_template': Maybe(str), - 'T2w_brain_ACPC_template': Maybe(str), - }, { - 'run': True, - 'run_before_preproc': bool1_1, - 'brain_size': int, - 'FOV_crop': In({'robustfov', 'flirt'}), - 'acpc_target': valid_options['acpc']['target'][1], - 'align_brain_mask': Maybe(bool1_1), - 'T1w_ACPC_template': str, - 'T1w_brain_ACPC_template': Maybe(str), - 'T2w_ACPC_template': Maybe(str), - 'T2w_brain_ACPC_template': Maybe(str), - }, { - 'run': True, - 'run_before_preproc': bool1_1, - 'brain_size': int, - 'FOV_crop': In({'robustfov', 'flirt'}), - 'acpc_target': valid_options['acpc']['target'][0], - 'align_brain_mask': Maybe(bool1_1), - 'T1w_ACPC_template': str, - 'T1w_brain_ACPC_template': str, - 'T2w_ACPC_template': Maybe(str), - 'T2w_brain_ACPC_template': Maybe(str), - },), - msg='\'brain\' requires \'T1w_brain_ACPC_template\' and ' - '\'T2w_brain_ACPC_template\' to ' - 'be populated if \'run\' is not set to Off', - ), - 'brain_extraction': { - 'run': bool1_1, - 'using': [In(valid_options['brain_extraction']['using'])], - 'AFNI-3dSkullStrip': { - 'mask_vol': bool1_1, - 
'shrink_factor': Number, - 'var_shrink_fac': bool1_1, - 'shrink_factor_bot_lim': Number, - 'avoid_vent': bool1_1, - 'n_iterations': int, - 'pushout': bool1_1, - 'touchup': bool1_1, - 'fill_hole': int, - 'NN_smooth': int, - 'smooth_final': int, - 'avoid_eyes': bool1_1, - 'use_edge': bool1_1, - 'exp_frac': Number, - 'push_to_edge': bool1_1, - 'use_skull': bool1_1, - 'perc_int': Number, - 'max_inter_iter': int, - 'fac': Number, - 'blur_fwhm': Number, - 'monkey': bool1_1, - }, - 'FSL-FNIRT': { - 'interpolation': In({ - 'trilinear', 'sinc', 'spline' - }), - }, - 'FSL-BET': { - 'frac': Number, - 'Robustfov': bool1_1, - 'mesh_boolean': bool1_1, - 'outline': bool1_1, - 'padding': bool1_1, - 'radius': int, - 'reduce_bias': bool1_1, - 'remove_eyes': bool1_1, - 'robust': bool1_1, - 'skull': bool1_1, - 'surfaces': bool1_1, - 'threshold': bool1_1, - 'vertical_gradient': Range(min=-1, max=1) + "working_directory": { + "path": str, + "remove_working_dir": bool1_1, }, - 'UNet': { - 'unet_model': Maybe(str), - }, - 'niworkflows-ants': { - 'template_path': Maybe(str), - 'mask_path': Maybe(str), - 'regmask_path': Maybe(str), + "log_directory": { + "run_logging": bool1_1, + "path": str, + "graphviz": { + "entire_workflow": { + "generate": bool, + "graph2use": Maybe( + All( + Coerce(ListFromItem), + [ + All( + Lower, + In( + ( + "orig", + "hierarchical", + "flat", + "exec", + "colored", + ) + ), + ) + ], + ) + ), + "format": Maybe( + All(Coerce(ListFromItem), [All(Lower, In(("png", "svg")))]) + ), + "simple_form": Maybe(bool), + } + }, }, - 'FreeSurfer-BET': { - 'T1w_brain_template_mask_ccs': Maybe(str) + "crash_log_directory": { + "path": Maybe(str), }, - }, - }, - 'segmentation': { - 'run': bool1_1, - 'tissue_segmentation': { - 'using': [In( - {'FSL-FAST', 'FreeSurfer', 'ANTs_Prior_Based', - 'Template_Based'} - )], - 'FSL-FAST': { - 'thresholding': { - 'use': In({'Auto', 'Custom'}), - 'Custom': { - 'CSF_threshold_value': float, - 'WM_threshold_value': float, - 'GM_threshold_value': float, + "system_config": { + "fail_fast": bool1_1, + "FSLDIR": Maybe(str), + "on_grid": { + "run": bool1_1, + "resource_manager": Maybe(str), + "SGE": { + "parallel_environment": Maybe(str), + "queue": Maybe(str), }, }, - 'use_priors': { - 'run': bool1_1, - 'priors_path': Maybe(str), - 'WM_path': Maybe(str), - 'GM_path': Maybe(str), - 'CSF_path': Maybe(str) + "maximum_memory_per_participant": Number, + "raise_insufficient": bool1_1, + "max_cores_per_participant": int, + "num_ants_threads": int, + "num_OMP_threads": int, + "num_participants_at_once": int, + "random_seed": Maybe( + Any("random", All(int, Range(min=1, max=np.iinfo(np.int32).max))) + ), + "observed_usage": { + "callback_log": Maybe(str), + "buffer": Number, }, }, - 'FreeSurfer': { - 'erode': Maybe(int), - 'CSF_label': Maybe([int]), - 'GM_label': Maybe([int]), - 'WM_label': Maybe([int]), + "Amazon-AWS": { + "aws_output_bucket_credentials": Maybe(str), + "s3_encryption": bool1_1, }, - 'ANTs_Prior_Based': { - 'run': forkable, - 'template_brain_list': Maybe(Any([str], [])), - 'template_segmentation_list': Maybe(Any([str], [])), - 'CSF_label': [int], - 'GM_label': [int], - 'WM_label': [int], + "Debugging": { + "verbose": bool1_1, }, - 'Template_Based': { - 'run': forkable, - 'template_for_segmentation': [In( - valid_options['segmentation']['template'] - )], - 'WHITE': Maybe(str), - 'GRAY': Maybe(str), - 'CSF': Maybe(str), + "freesurfer_dir": str, + "outdir_ingress": { + "run": bool1_1, + "Template": Maybe(str), }, }, - }, - 'registration_workflows': { - 
'anatomical_registration': { - 'run': bool1_1, - 'resolution_for_anat': All(str, Match(RESOLUTION_REGEX)), - 'T1w_brain_template': Maybe(str), - 'T1w_template': Maybe(str), - 'T1w_brain_template_mask': Maybe(str), - 'reg_with_skull': bool1_1, - 'registration': { - 'using': [In({'ANTS', 'FSL', 'FSL-linear'})], - 'ANTs': { - 'use_lesion_mask': bool1_1, - 'T1_registration': Maybe(ANTs_parameters), - 'interpolation': In({ - 'Linear', 'BSpline', 'LanczosWindowedSinc' - }), - }, - 'FSL-FNIRT': { - 'fnirt_config': Maybe(str), - 'ref_resolution': All(str, Match(RESOLUTION_REGEX)), - 'FNIRT_T1w_brain_template': Maybe(str), - 'FNIRT_T1w_template': Maybe(str), - 'interpolation': In({ - 'trilinear', 'sinc', 'spline' - }), - 'identity_matrix': Maybe(str), - 'ref_mask': Maybe(str), - 'ref_mask_res-2': Maybe(str), - 'T1w_template_res-2': Maybe(str), - }, + "anatomical_preproc": { + "run": bool1_1, + "run_t2": bool1_1, + "non_local_means_filtering": { + "run": forkable, + "noise_model": Maybe(str), }, - 'overwrite_transform': { - 'run': bool1_1, - 'using': In({'FSL'}), + "n4_bias_field_correction": { + "run": forkable, + "shrink_factor": int, }, - }, - 'functional_registration': { - 'coregistration': { - 'run': bool1_1, - 'interpolation': In({'trilinear', 'sinc', 'spline'}), - 'using': str, - 'input': str, - 'cost': str, - 'dof': int, - 'arguments': Maybe(str), - 'func_input_prep': { - 'reg_with_skull': bool1_1, - 'input': [In({ - 'Mean_Functional', 'Selected_Functional_Volume', - 'fmriprep_reference' - })], - 'Mean Functional': { - 'n4_correct_func': bool1_1 + "t1t2_bias_field_correction": Required( + # require 'T1w_brain_ACPC_template' if 'acpc_target' is 'brain' + Any( + { + "run": False, + "BiasFieldSmoothingSigma": Maybe(int), }, - 'Selected Functional Volume': { - 'func_reg_input_volume': int + { + "run": True, + "BiasFieldSmoothingSigma": Maybe(int), }, + ), + ), + "acpc_alignment": Required( + # require 'T1w_brain_ACPC_template' and + # 'T2w_brain_ACPC_template' if 'acpc_target' is 'brain' + Any( + { + "run": False, + "run_before_preproc": Maybe(bool1_1), + "brain_size": Maybe(int), + "FOV_crop": Maybe(In({"robustfov", "flirt"})), + "acpc_target": Maybe(In(valid_options["acpc"]["target"])), + "align_brain_mask": Maybe(bool1_1), + "T1w_ACPC_template": Maybe(str), + "T1w_brain_ACPC_template": Maybe(str), + "T2w_ACPC_template": Maybe(str), + "T2w_brain_ACPC_template": Maybe(str), + }, + { + "run": True, + "run_before_preproc": bool1_1, + "brain_size": int, + "FOV_crop": In({"robustfov", "flirt"}), + "acpc_target": valid_options["acpc"]["target"][1], + "align_brain_mask": Maybe(bool1_1), + "T1w_ACPC_template": str, + "T1w_brain_ACPC_template": Maybe(str), + "T2w_ACPC_template": Maybe(str), + "T2w_brain_ACPC_template": Maybe(str), + }, + { + "run": True, + "run_before_preproc": bool1_1, + "brain_size": int, + "FOV_crop": In({"robustfov", "flirt"}), + "acpc_target": valid_options["acpc"]["target"][0], + "align_brain_mask": Maybe(bool1_1), + "T1w_ACPC_template": str, + "T1w_brain_ACPC_template": str, + "T2w_ACPC_template": Maybe(str), + "T2w_brain_ACPC_template": Maybe(str), + }, + ), + msg="'brain' requires 'T1w_brain_ACPC_template' and " + "'T2w_brain_ACPC_template' to " + "be populated if 'run' is not set to Off", + ), + "brain_extraction": { + "run": bool1_1, + "using": [In(valid_options["brain_extraction"]["using"])], + "AFNI-3dSkullStrip": { + "mask_vol": bool1_1, + "shrink_factor": Number, + "var_shrink_fac": bool1_1, + "shrink_factor_bot_lim": Number, + "avoid_vent": bool1_1, + "n_iterations": 
int, + "pushout": bool1_1, + "touchup": bool1_1, + "fill_hole": int, + "NN_smooth": int, + "smooth_final": int, + "avoid_eyes": bool1_1, + "use_edge": bool1_1, + "exp_frac": Number, + "push_to_edge": bool1_1, + "use_skull": bool1_1, + "perc_int": Number, + "max_inter_iter": int, + "fac": Number, + "blur_fwhm": Number, + "monkey": bool1_1, }, - 'boundary_based_registration': { - 'run': forkable, - 'bbr_schedule': str, - 'bbr_wm_map': In({'probability_map', 'partial_volume_map'}), - 'bbr_wm_mask_args': str, - 'reference': In({'whole-head', 'brain'}) + "FSL-FNIRT": { + "interpolation": In({"trilinear", "sinc", "spline"}), }, - }, - 'EPI_registration': { - 'run': bool1_1, - 'using': [In({'ANTS', 'FSL', 'FSL-linear'})], - 'EPI_template': Maybe(str), - 'EPI_template_mask': Maybe(str), - 'ANTs': { - 'parameters': Maybe(ANTs_parameters), - 'interpolation': In({ - 'Linear', 'BSpline', 'LanczosWindowedSinc' - }), + "FSL-BET": { + "frac": Number, + "Robustfov": bool1_1, + "mesh_boolean": bool1_1, + "outline": bool1_1, + "padding": bool1_1, + "radius": int, + "reduce_bias": bool1_1, + "remove_eyes": bool1_1, + "robust": bool1_1, + "skull": bool1_1, + "surfaces": bool1_1, + "threshold": bool1_1, + "vertical_gradient": Range(min=-1, max=1), }, - 'FSL-FNIRT': { - 'fnirt_config': Maybe(str), - 'interpolation': In({'trilinear', 'sinc', 'spline'}), - 'identity_matrix': Maybe(str), + "UNet": { + "unet_model": Maybe(str), }, - }, - 'func_registration_to_template': { - 'run': bool1_1, - 'run_EPI': bool1_1, - 'output_resolution': { - 'func_preproc_outputs': All( - str, Match(RESOLUTION_REGEX)), - 'func_derivative_outputs': All( - str, Match(RESOLUTION_REGEX) - ), + "niworkflows-ants": { + "template_path": Maybe(str), + "mask_path": Maybe(str), + "regmask_path": Maybe(str), }, - 'target_template': { - 'using': [In({'T1_template', 'EPI_template'})], - 'T1_template': { - 'T1w_brain_template_funcreg': Maybe(str), - 'T1w_template_funcreg': Maybe(str), - 'T1w_brain_template_mask_funcreg': Maybe(str), - 'T1w_template_for_resample': Maybe(str), + "FreeSurfer-BET": {"T1w_brain_template_mask_ccs": Maybe(str)}, + }, + }, + "segmentation": { + "run": bool1_1, + "tissue_segmentation": { + "using": [ + In({"FSL-FAST", "FreeSurfer", "ANTs_Prior_Based", "Template_Based"}) + ], + "FSL-FAST": { + "thresholding": { + "use": In({"Auto", "Custom"}), + "Custom": { + "CSF_threshold_value": float, + "WM_threshold_value": float, + "GM_threshold_value": float, + }, }, - 'EPI_template': { - 'EPI_template_funcreg': Maybe(str), - 'EPI_template_mask_funcreg': Maybe(str), - 'EPI_template_for_resample': Maybe(str) + "use_priors": { + "run": bool1_1, + "priors_path": Maybe(str), + "WM_path": Maybe(str), + "GM_path": Maybe(str), + "CSF_path": Maybe(str), }, }, - 'ANTs_pipelines': { - 'interpolation': In({ - 'Linear', 'BSpline', 'LanczosWindowedSinc'}) + "FreeSurfer": { + "erode": Maybe(int), + "CSF_label": Maybe([int]), + "GM_label": Maybe([int]), + "WM_label": Maybe([int]), }, - 'FNIRT_pipelines': { - 'interpolation': In({'trilinear', 'sinc', 'spline'}), - 'identity_matrix': Maybe(str), + "ANTs_Prior_Based": { + "run": forkable, + "template_brain_list": Maybe(Any([str], [])), + "template_segmentation_list": Maybe(Any([str], [])), + "CSF_label": [int], + "GM_label": [int], + "WM_label": [int], }, - 'apply_transform': { - 'using': In({'default', 'abcd', 'dcan_nhp', - 'single_step_resampling_from_stc'}), + "Template_Based": { + "run": forkable, + "template_for_segmentation": [ + In(valid_options["segmentation"]["template"]) + ], + "WHITE": 
Maybe(str), + "GRAY": Maybe(str), + "CSF": Maybe(str), }, }, }, - }, - 'surface_analysis': { - 'abcd_prefreesurfer_prep':{ - 'run': bool1_1, - }, - 'freesurfer': { - 'run_reconall': bool1_1, - 'reconall_args': Maybe(str), - # 'generate_masks': bool1_1, - 'ingress_reconall': bool1_1, - }, - 'post_freesurfer': { - 'run': bool1_1, - 'surf_atlas_dir': Maybe(str), - 'gray_ordinates_dir': Maybe(str), - 'gray_ordinates_res': Maybe(int), - 'high_res_mesh': Maybe(int), - 'low_res_mesh': Maybe(int), - 'subcortical_gray_labels': Maybe(str), - 'freesurfer_labels': Maybe(str), - 'fmri_res': Maybe(int), - 'smooth_fwhm': Maybe(int), - }, - 'amplitude_low_frequency_fluctuation': { - 'run': bool1_1, - }, - 'regional_homogeneity': { - 'run': bool1_1, - }, - 'surface_connectivity': { - 'run': bool1_1, - 'surface_parcellation_template': Maybe(str), - }, - }, - 'longitudinal_template_generation': { - 'run': bool1_1, - 'average_method': In({'median', 'mean', 'std'}), - 'dof': In({12, 9, 7, 6}), - 'interp': In({'trilinear', 'nearestneighbour', 'sinc', 'spline'}), - 'cost': In({ - 'corratio', 'mutualinfo', 'normmi', 'normcorr', 'leastsq', - 'labeldiff', 'bbr'}), - 'thread_pool': int, - 'convergence_threshold': Number, - }, - 'functional_preproc': { - 'run': bool1_1, - 'truncation': { - 'start_tr': int, - 'stop_tr': Maybe(Any(int, All(Capitalize, 'End'))) - }, - 'update_header': { - 'run': bool1_1, - }, - 'scaling': { - 'run': bool1_1, - 'scaling_factor': Number - }, - 'despiking': { - 'run': forkable, - 'space': In({'native', 'template'}) - }, - 'slice_timing_correction': { - 'run': forkable, - 'tpattern': Maybe(str), - 'tzero': Maybe(int), - }, - 'motion_estimates_and_correction': { - 'run': bool1_1, - 'motion_estimates': { - 'calculate_motion_first': bool1_1, - 'calculate_motion_after': bool1_1, + "registration_workflows": { + "anatomical_registration": { + "run": bool1_1, + "resolution_for_anat": All(str, Match(RESOLUTION_REGEX)), + "T1w_brain_template": Maybe(str), + "T1w_template": Maybe(str), + "T1w_brain_template_mask": Maybe(str), + "reg_with_skull": bool1_1, + "registration": { + "using": [In({"ANTS", "FSL", "FSL-linear"})], + "ANTs": { + "use_lesion_mask": bool1_1, + "T1_registration": Maybe(ANTs_parameters), + "interpolation": In( + {"Linear", "BSpline", "LanczosWindowedSinc"} + ), + }, + "FSL-FNIRT": { + "fnirt_config": Maybe(str), + "ref_resolution": All(str, Match(RESOLUTION_REGEX)), + "FNIRT_T1w_brain_template": Maybe(str), + "FNIRT_T1w_template": Maybe(str), + "interpolation": In({"trilinear", "sinc", "spline"}), + "identity_matrix": Maybe(str), + "ref_mask": Maybe(str), + "ref_mask_res-2": Maybe(str), + "T1w_template_res-2": Maybe(str), + }, + }, + "overwrite_transform": { + "run": bool1_1, + "using": In({"FSL"}), + }, }, - 'motion_correction': { - 'using': Optional(All(Coerce(ListFromItem), - Length(min=0, max=1, - msg='Forking is currently broken for this option. ' - 'Please use separate configs if you want to ' - 'use each of 3dvolreg and mcflirt. 
Follow ' - 'https://github.com/FCP-INDI/C-PAC/issues/1935 ' - 'to see when this issue is resolved.'), - [In(valid_options['motion_correction'])])), - 'AFNI-3dvolreg': { - 'functional_volreg_twopass': bool1_1, + "functional_registration": { + "coregistration": { + "run": bool1_1, + "interpolation": In({"trilinear", "sinc", "spline"}), + "using": str, + "input": str, + "cost": str, + "dof": int, + "arguments": Maybe(str), + "func_input_prep": { + "reg_with_skull": bool1_1, + "input": [ + In( + { + "Mean_Functional", + "Selected_Functional_Volume", + "fmriprep_reference", + } + ) + ], + "Mean Functional": {"n4_correct_func": bool1_1}, + "Selected Functional Volume": {"func_reg_input_volume": int}, + }, + "boundary_based_registration": { + "run": forkable, + "bbr_schedule": str, + "bbr_wm_map": In({"probability_map", "partial_volume_map"}), + "bbr_wm_mask_args": str, + "reference": In({"whole-head", "brain"}), + }, + }, + "EPI_registration": { + "run": bool1_1, + "using": [In({"ANTS", "FSL", "FSL-linear"})], + "EPI_template": Maybe(str), + "EPI_template_mask": Maybe(str), + "ANTs": { + "parameters": Maybe(ANTs_parameters), + "interpolation": In( + {"Linear", "BSpline", "LanczosWindowedSinc"} + ), + }, + "FSL-FNIRT": { + "fnirt_config": Maybe(str), + "interpolation": In({"trilinear", "sinc", "spline"}), + "identity_matrix": Maybe(str), + }, + }, + "func_registration_to_template": { + "run": bool1_1, + "run_EPI": bool1_1, + "output_resolution": { + "func_preproc_outputs": All(str, Match(RESOLUTION_REGEX)), + "func_derivative_outputs": All(str, Match(RESOLUTION_REGEX)), + }, + "target_template": { + "using": [In({"T1_template", "EPI_template"})], + "T1_template": { + "T1w_brain_template_funcreg": Maybe(str), + "T1w_template_funcreg": Maybe(str), + "T1w_brain_template_mask_funcreg": Maybe(str), + "T1w_template_for_resample": Maybe(str), + }, + "EPI_template": { + "EPI_template_funcreg": Maybe(str), + "EPI_template_mask_funcreg": Maybe(str), + "EPI_template_for_resample": Maybe(str), + }, + }, + "ANTs_pipelines": { + "interpolation": In( + {"Linear", "BSpline", "LanczosWindowedSinc"} + ) + }, + "FNIRT_pipelines": { + "interpolation": In({"trilinear", "sinc", "spline"}), + "identity_matrix": Maybe(str), + }, + "apply_transform": { + "using": In( + { + "default", + "abcd", + "dcan_nhp", + "single_step_resampling_from_stc", + } + ), + }, }, - 'motion_correction_reference': [In({ - 'mean', 'median', 'selected_volume', - 'fmriprep_reference'})], - 'motion_correction_reference_volume': int, }, - 'motion_estimate_filter': Required( - Any({'run': forkable, - 'filters': [motion_estimate_filter]}, - {'run': All(forkable, [In([False], [])]), - 'filters': Maybe(list)}) - ), }, - 'distortion_correction': { - 'run': forkable, - 'using': [In(['PhaseDiff', 'Blip', 'Blip-FSL-TOPUP'])], - 'PhaseDiff': { - 'fmap_skullstrip_option': In(['BET', 'AFNI']), - 'fmap_skullstrip_BET_frac': float, - 'fmap_skullstrip_AFNI_threshold': float, + "surface_analysis": { + "abcd_prefreesurfer_prep": { + "run": bool1_1, }, - 'Blip-FSL-TOPUP': { - 'warpres': int, - 'subsamp': int, - 'fwhm': int, - 'miter': int, - 'lambda': int, - 'ssqlambda': int, - 'regmod': In({'bending_energy', 'membrane_energy'}), - 'estmov': int, - 'minmet': int, - 'splineorder': int, - 'numprec': str, - 'interp': In({'spline', 'linear'}), - 'scale': int, - 'regrid': int - } - }, - 'func_masking': { - 'run': bool1_1, - 'using': [In( - ['AFNI', 'FSL', 'FSL_AFNI', 'Anatomical_Refined', - 'Anatomical_Based', 'Anatomical_Resampled', - 'CCS_Anatomical_Refined'] - )], - 
# handle validating mutually-exclusive booleans for FSL-BET - # functional_mean_boolean must be True if one of the mutually- - # exclusive options are - # see mutex definition for more definition - 'FSL-BET': Maybe(Any(*( - # exactly one mutually exclusive option on - [{k: d[k] for d in r for k in d} for r in [[ - { - **mutex['FSL-BET']['rem'], - 'functional_mean_boolean': True, - k1: True, - k2: False - } for k2 in mutex['FSL-BET']['mutex'] if k2 != k1 - ] for k1 in mutex['FSL-BET']['mutex']]] + - # no mutually-exclusive options on - [{ - **mutex['FSL-BET']['rem'], - 'functional_mean_boolean': bool1_1, - **{k: False for k in mutex['FSL-BET']['mutex']} - }])) - ), - 'FSL_AFNI': { - 'bold_ref': Maybe(str), - 'brain_mask': Maybe(str), - 'brain_probseg': Maybe(str), + "freesurfer": { + "run_reconall": bool1_1, + "reconall_args": Maybe(str), + # 'generate_masks': bool1_1, + "ingress_reconall": bool1_1, }, - 'Anatomical_Refined': { - 'anatomical_mask_dilation': Maybe(bool1_1), + "post_freesurfer": { + "run": bool1_1, + "surf_atlas_dir": Maybe(str), + "gray_ordinates_dir": Maybe(str), + "gray_ordinates_res": Maybe(int), + "high_res_mesh": Maybe(int), + "low_res_mesh": Maybe(int), + "subcortical_gray_labels": Maybe(str), + "freesurfer_labels": Maybe(str), + "fmri_res": Maybe(int), + "smooth_fwhm": Maybe(int), + }, + "amplitude_low_frequency_fluctuation": { + "run": bool1_1, + }, + "regional_homogeneity": { + "run": bool1_1, + }, + "surface_connectivity": { + "run": bool1_1, + "surface_parcellation_template": Maybe(str), }, - 'apply_func_mask_in_native_space': bool1_1, - }, - 'generate_func_mean': { - 'run': bool1_1, - }, - 'normalize_func': { - 'run': bool1_1, - }, - 'coreg_prep': { - 'run': bool1_1, }, - }, - 'nuisance_corrections': { - '1-ICA-AROMA': { - 'run': forkable, - 'denoising_type': In({'aggr', 'nonaggr'}), + "longitudinal_template_generation": { + "run": bool1_1, + "average_method": In({"median", "mean", "std"}), + "dof": In({12, 9, 7, 6}), + "interp": In({"trilinear", "nearestneighbour", "sinc", "spline"}), + "cost": In( + { + "corratio", + "mutualinfo", + "normmi", + "normcorr", + "leastsq", + "labeldiff", + "bbr", + } + ), + "thread_pool": int, + "convergence_threshold": Number, }, - '2-nuisance_regression': { - 'run': forkable, - 'space': All(Coerce(ItemFromList), - Lower, In({'native', 'template'})), - 'create_regressors': bool1_1, - 'ingress_regressors': { - 'run': bool1_1, - 'Regressors': { - 'Name': Maybe(str), - 'Columns': [str]}, + "functional_preproc": { + "run": bool1_1, + "truncation": { + "start_tr": int, + "stop_tr": Maybe(Any(int, All(Capitalize, "End"))), + }, + "update_header": { + "run": bool1_1, + }, + "scaling": {"run": bool1_1, "scaling_factor": Number}, + "despiking": {"run": forkable, "space": In({"native", "template"})}, + "slice_timing_correction": { + "run": forkable, + "tpattern": Maybe(str), + "tzero": Maybe(int), + }, + "motion_estimates_and_correction": { + "run": bool1_1, + "motion_estimates": { + "calculate_motion_first": bool1_1, + "calculate_motion_after": bool1_1, + }, + "motion_correction": { + "using": Optional( + All( + Coerce(ListFromItem), + Length( + min=0, + max=1, + msg="Forking is currently broken for this option. " + "Please use separate configs if you want to " + "use each of 3dvolreg and mcflirt. 
Follow " + "https://github.com/FCP-INDI/C-PAC/issues/1935 " + "to see when this issue is resolved.", + ), + [In(valid_options["motion_correction"])], + ) + ), + "AFNI-3dvolreg": { + "functional_volreg_twopass": bool1_1, + }, + "motion_correction_reference": [ + In({"mean", "median", "selected_volume", "fmriprep_reference"}) + ], + "motion_correction_reference_volume": int, + }, + "motion_estimate_filter": Required( + Any( + {"run": forkable, "filters": [motion_estimate_filter]}, + { + "run": All(forkable, [In([False], [])]), + "filters": Maybe(list), + }, + ) + ), }, - 'Regressors': Maybe([Schema({ - 'Name': Required(str), - 'Censor': { - 'method': str, - 'thresholds': [{ - 'type': str, - 'value': float, - }], - 'number_of_previous_trs_to_censor': Maybe(int), - 'number_of_subsequent_trs_to_censor': Maybe(int), + "distortion_correction": { + "run": forkable, + "using": [In(["PhaseDiff", "Blip", "Blip-FSL-TOPUP"])], + "PhaseDiff": { + "fmap_skullstrip_option": In(["BET", "AFNI"]), + "fmap_skullstrip_BET_frac": float, + "fmap_skullstrip_AFNI_threshold": float, }, - 'Motion': { - 'include_delayed': bool1_1, - 'include_squared': bool1_1, - 'include_delayed_squared': bool1_1 + "Blip-FSL-TOPUP": { + "warpres": int, + "subsamp": int, + "fwhm": int, + "miter": int, + "lambda": int, + "ssqlambda": int, + "regmod": In({"bending_energy", "membrane_energy"}), + "estmov": int, + "minmet": int, + "splineorder": int, + "numprec": str, + "interp": In({"spline", "linear"}), + "scale": int, + "regrid": int, }, - 'aCompCor': valid_options['Regressors']['CompCor'], - 'tCompCor': valid_options['Regressors']['CompCor'], - 'CerebrospinalFluid': valid_options[ - 'Regressors' - ]['segmentation'], - 'WhiteMatter': valid_options[ - 'Regressors' - ]['segmentation'], - 'GreyMatter': valid_options[ - 'Regressors' - ]['segmentation'], - 'GlobalSignal': {'summary': str}, - 'PolyOrt': {'degree': int}, - 'Bandpass': { - 'bottom_frequency': float, - 'top_frequency': float, - 'method': str, - } # how to check if [0] is > than [1]? 
- }, extra=ALLOW_EXTRA)]), - 'lateral_ventricles_mask': Maybe(str), - 'bandpass_filtering_order': Maybe( - In({'After', 'Before'})), - 'regressor_masks': { - 'erode_anatomical_brain_mask': { - 'run': bool1_1, - 'brain_mask_erosion_prop': Maybe(Number), - 'brain_mask_erosion_mm': Maybe(Number), - 'brain_erosion_mm': Maybe(Number) + }, + "func_masking": { + "run": bool1_1, + "using": [ + In( + [ + "AFNI", + "FSL", + "FSL_AFNI", + "Anatomical_Refined", + "Anatomical_Based", + "Anatomical_Resampled", + "CCS_Anatomical_Refined", + ] + ) + ], + # handle validating mutually-exclusive booleans for FSL-BET + # functional_mean_boolean must be True if one of the mutually- + # exclusive options are + # see mutex definition for more definition + "FSL-BET": Maybe( + Any( + *( + # exactly one mutually exclusive option on + [ + {k: d[k] for d in r for k in d} + for r in [ + [ + { + **mutex["FSL-BET"]["rem"], + "functional_mean_boolean": True, + k1: True, + k2: False, + } + for k2 in mutex["FSL-BET"]["mutex"] + if k2 != k1 + ] + for k1 in mutex["FSL-BET"]["mutex"] + ] + ] + + + # no mutually-exclusive options on + [ + { + **mutex["FSL-BET"]["rem"], + "functional_mean_boolean": bool1_1, + **{k: False for k in mutex["FSL-BET"]["mutex"]}, + } + ] + ) + ) + ), + "FSL_AFNI": { + "bold_ref": Maybe(str), + "brain_mask": Maybe(str), + "brain_probseg": Maybe(str), }, - 'erode_csf': { - 'run': bool1_1, - 'csf_erosion_prop': Maybe(Number), - 'csf_mask_erosion_mm': Maybe(Number), - 'csf_erosion_mm': Maybe(Number), + "Anatomical_Refined": { + "anatomical_mask_dilation": Maybe(bool1_1), }, - 'erode_wm': { - 'run': bool1_1, - 'wm_erosion_prop': Maybe(Number), - 'wm_mask_erosion_mm': Maybe(Number), - 'wm_erosion_mm': Maybe(Number), + "apply_func_mask_in_native_space": bool1_1, + }, + "generate_func_mean": { + "run": bool1_1, + }, + "normalize_func": { + "run": bool1_1, + }, + "coreg_prep": { + "run": bool1_1, + }, + }, + "nuisance_corrections": { + "1-ICA-AROMA": { + "run": forkable, + "denoising_type": In({"aggr", "nonaggr"}), + }, + "2-nuisance_regression": { + "run": forkable, + "space": All(Coerce(ItemFromList), Lower, In({"native", "template"})), + "create_regressors": bool1_1, + "ingress_regressors": { + "run": bool1_1, + "Regressors": {"Name": Maybe(str), "Columns": [str]}, + }, + "Regressors": Maybe( + [ + Schema( + { + "Name": Required(str), + "Censor": { + "method": str, + "thresholds": [ + { + "type": str, + "value": float, + } + ], + "number_of_previous_trs_to_censor": Maybe(int), + "number_of_subsequent_trs_to_censor": Maybe(int), + }, + "Motion": { + "include_delayed": bool1_1, + "include_squared": bool1_1, + "include_delayed_squared": bool1_1, + }, + "aCompCor": valid_options["Regressors"]["CompCor"], + "tCompCor": valid_options["Regressors"]["CompCor"], + "CerebrospinalFluid": valid_options["Regressors"][ + "segmentation" + ], + "WhiteMatter": valid_options["Regressors"][ + "segmentation" + ], + "GreyMatter": valid_options["Regressors"][ + "segmentation" + ], + "GlobalSignal": {"summary": str}, + "PolyOrt": {"degree": int}, + "Bandpass": { + "bottom_frequency": float, + "top_frequency": float, + "method": str, + }, # how to check if [0] is > than [1]? 
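
[annotation] The inline question just above, how to check that ``bottom_frequency`` stays below ``top_frequency``, can be handled by chaining a custom callable after the dict schema with Voluptuous ``All``. A minimal sketch under that assumption; this is not part of the patch, and ``_ordered_band`` / ``bandpass_schema`` are hypothetical names:

    from voluptuous import All, Invalid

    def _ordered_band(band):
        # reject a Bandpass block whose cutoffs are out of order
        low = band.get("bottom_frequency")
        high = band.get("top_frequency")
        if low is not None and high is not None and low >= high:
            raise Invalid("bottom_frequency must be below top_frequency")
        return band

    bandpass_schema = All(
        {"bottom_frequency": float, "top_frequency": float, "method": str},
        _ordered_band,
    )

Voluptuous applies the validators in order, so the dict schema checks the key types first and the callable then only sees well-typed values.
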
+ }, + extra=ALLOW_EXTRA, + ) + ] + ), + "lateral_ventricles_mask": Maybe(str), + "bandpass_filtering_order": Maybe(In({"After", "Before"})), + "regressor_masks": { + "erode_anatomical_brain_mask": { + "run": bool1_1, + "brain_mask_erosion_prop": Maybe(Number), + "brain_mask_erosion_mm": Maybe(Number), + "brain_erosion_mm": Maybe(Number), + }, + "erode_csf": { + "run": bool1_1, + "csf_erosion_prop": Maybe(Number), + "csf_mask_erosion_mm": Maybe(Number), + "csf_erosion_mm": Maybe(Number), + }, + "erode_wm": { + "run": bool1_1, + "wm_erosion_prop": Maybe(Number), + "wm_mask_erosion_mm": Maybe(Number), + "wm_erosion_mm": Maybe(Number), + }, + "erode_gm": { + "run": bool1_1, + "gm_erosion_prop": Maybe(Number), + "gm_mask_erosion_mm": Maybe(Number), + "gm_erosion_mm": Maybe(Number), + }, }, - 'erode_gm': { - 'run': bool1_1, - 'gm_erosion_prop': Maybe(Number), - 'gm_mask_erosion_mm': Maybe(Number), - 'gm_erosion_mm': Maybe(Number), - } }, }, - }, - 'amplitude_low_frequency_fluctuation': { - 'run': bool1_1, - 'target_space': target_space, - 'highpass_cutoff': [float], - 'lowpass_cutoff': [float], - }, - 'voxel_mirrored_homotopic_connectivity': { - 'run': bool1_1, - 'symmetric_registration': { - 'T1w_brain_template_symmetric': Maybe(str), - 'T1w_brain_template_symmetric_funcreg': Maybe(str), - 'T1w_brain_template_symmetric_for_resample': Maybe(str), - 'T1w_template_symmetric': Maybe(str), - 'T1w_template_symmetric_funcreg': Maybe(str), - 'T1w_template_symmetric_for_resample': Maybe(str), - 'dilated_symmetric_brain_mask': Maybe(str), - 'dilated_symmetric_brain_mask_for_resample': Maybe(str), + "amplitude_low_frequency_fluctuation": { + "run": bool1_1, + "target_space": target_space, + "highpass_cutoff": [float], + "lowpass_cutoff": [float], }, - }, - 'regional_homogeneity': { - 'run': bool1_1, - 'target_space': target_space, - 'cluster_size': In({7, 19, 27}), - }, - 'post_processing': { - 'spatial_smoothing': { - 'run': bool1_1, - 'output': [In({'smoothed', 'nonsmoothed'})], - 'smoothing_method': [In({'FSL', 'AFNI'})], - 'fwhm': [int] + "voxel_mirrored_homotopic_connectivity": { + "run": bool1_1, + "symmetric_registration": { + "T1w_brain_template_symmetric": Maybe(str), + "T1w_brain_template_symmetric_funcreg": Maybe(str), + "T1w_brain_template_symmetric_for_resample": Maybe(str), + "T1w_template_symmetric": Maybe(str), + "T1w_template_symmetric_funcreg": Maybe(str), + "T1w_template_symmetric_for_resample": Maybe(str), + "dilated_symmetric_brain_mask": Maybe(str), + "dilated_symmetric_brain_mask_for_resample": Maybe(str), + }, }, - 'z-scoring': { - 'run': bool1_1, - 'output': [In({'z-scored', 'raw'})], + "regional_homogeneity": { + "run": bool1_1, + "target_space": target_space, + "cluster_size": In({7, 19, 27}), }, - }, - 'timeseries_extraction': { - 'run': bool1_1, - Optional('roi_paths_fully_specified'): bool1_1, - 'tse_roi_paths': Optional( - Maybe({ - str: In({', '.join( - list(options) - ) for options in list(chain.from_iterable([list( - permutations(valid_options['timeseries']['roi_paths'], - number_of) - ) for number_of in range(1, 6)]))}), - }), - msg=permutation_message( - 'tse_roi_paths', valid_options['timeseries']['roi_paths']) - ), - 'realignment': In({'ROI_to_func', 'func_to_ROI'}), - 'connectivity_matrix': { - option: Maybe([In(valid_options['connectivity_matrix'][option])]) - for option in ['using', 'measure'] + "post_processing": { + "spatial_smoothing": { + "run": bool1_1, + "output": [In({"smoothed", "nonsmoothed"})], + "smoothing_method": [In({"FSL", "AFNI"})], + "fwhm": 
[int], + }, + "z-scoring": { + "run": bool1_1, + "output": [In({"z-scored", "raw"})], + }, }, - }, - 'seed_based_correlation_analysis': { - 'run': bool1_1, - Optional('roi_paths_fully_specified'): bool1_1, - 'sca_roi_paths': Optional( - Maybe({ - str: In({', '.join(list( - options - )) for options in list(chain.from_iterable([list( - permutations(valid_options['sca']['roi_paths'], number_of) - ) for number_of in range(1, 4)]))}) - }), - msg=permutation_message( - 'sca_roi_paths', valid_options['sca']['roi_paths']) - ), - 'norm_timeseries_for_DR': bool1_1, - }, - 'network_centrality': { - 'run': bool1_1, - 'memory_allocation': Number, - 'template_specification_file': Maybe(str), - 'degree_centrality': { - 'weight_options': [In( - valid_options['centrality']['weight_options'] - )], - 'correlation_threshold_option': In( - valid_options['centrality']['threshold_options']), - 'correlation_threshold': Range(min=-1, max=1) + "timeseries_extraction": { + "run": bool1_1, + Optional("roi_paths_fully_specified"): bool1_1, + "tse_roi_paths": Optional( + Maybe( + { + str: In( + { + ", ".join(list(options)) + for options in list( + chain.from_iterable( + [ + list( + permutations( + valid_options["timeseries"][ + "roi_paths" + ], + number_of, + ) + ) + for number_of in range(1, 6) + ] + ) + ) + } + ), + } + ), + msg=permutation_message( + "tse_roi_paths", valid_options["timeseries"]["roi_paths"] + ), + ), + "realignment": In({"ROI_to_func", "func_to_ROI"}), + "connectivity_matrix": { + option: Maybe([In(valid_options["connectivity_matrix"][option])]) + for option in ["using", "measure"] + }, }, - 'eigenvector_centrality': { - 'weight_options': [In( - valid_options['centrality']['weight_options'] - )], - 'correlation_threshold_option': In( - valid_options['centrality']['threshold_options'] + "seed_based_correlation_analysis": { + "run": bool1_1, + Optional("roi_paths_fully_specified"): bool1_1, + "sca_roi_paths": Optional( + Maybe( + { + str: In( + { + ", ".join(list(options)) + for options in list( + chain.from_iterable( + [ + list( + permutations( + valid_options["sca"]["roi_paths"], + number_of, + ) + ) + for number_of in range(1, 4) + ] + ) + ) + } + ) + } + ), + msg=permutation_message( + "sca_roi_paths", valid_options["sca"]["roi_paths"] + ), ), - 'correlation_threshold': Range(min=-1, max=1) + "norm_timeseries_for_DR": bool1_1, }, - 'local_functional_connectivity_density': { - 'weight_options': [In( - valid_options['centrality']['weight_options'] - )], - 'correlation_threshold_option': In([ - o for o in valid_options['centrality']['threshold_options'] if - o != 'Sparsity threshold' - ]), - 'correlation_threshold': Range(min=-1, max=1) + "network_centrality": { + "run": bool1_1, + "memory_allocation": Number, + "template_specification_file": Maybe(str), + "degree_centrality": { + "weight_options": [In(valid_options["centrality"]["weight_options"])], + "correlation_threshold_option": In( + valid_options["centrality"]["threshold_options"] + ), + "correlation_threshold": Range(min=-1, max=1), + }, + "eigenvector_centrality": { + "weight_options": [In(valid_options["centrality"]["weight_options"])], + "correlation_threshold_option": In( + valid_options["centrality"]["threshold_options"] + ), + "correlation_threshold": Range(min=-1, max=1), + }, + "local_functional_connectivity_density": { + "weight_options": [In(valid_options["centrality"]["weight_options"])], + "correlation_threshold_option": In( + [ + o + for o in valid_options["centrality"]["threshold_options"] + if o != "Sparsity threshold" + ] + ), 
+ "correlation_threshold": Range(min=-1, max=1), + }, }, - }, - 'PyPEER': { - 'run': bool1_1, - 'eye_scan_names': Maybe(Any([str], [])), - 'data_scan_names': Maybe(Any([str], [])), - 'eye_mask_path': Maybe(str), - 'stimulus_path': Maybe(str), - 'minimal_nuisance_correction': { - 'peer_gsr': bool1_1, - 'peer_scrub': bool1_1, - 'scrub_thresh': float, + "PyPEER": { + "run": bool1_1, + "eye_scan_names": Maybe(Any([str], [])), + "data_scan_names": Maybe(Any([str], [])), + "eye_mask_path": Maybe(str), + "stimulus_path": Maybe(str), + "minimal_nuisance_correction": { + "peer_gsr": bool1_1, + "peer_scrub": bool1_1, + "scrub_thresh": float, + }, }, - }, -}) + } +) def schema(config_dict): - '''Validate a pipeline configuration against the latest validation schema + """Validate a pipeline configuration against the latest validation schema by first applying backwards-compatibility patches, then applying Voluptuous validation, then handling complex configuration interaction checks before returning validated config_dict. @@ -1108,106 +1266,140 @@ def schema(config_dict): Returns ------- dict - ''' + """ from CPAC.utils.utils import _changes_1_8_0_to_1_8_1 + try: - partially_validated = latest_schema( - _changes_1_8_0_to_1_8_1(config_dict)) + partially_validated = latest_schema(_changes_1_8_0_to_1_8_1(config_dict)) except MultipleInvalid as multiple_invalid: - if (multiple_invalid.path == ['nuisance_corrections', - '2-nuisance_regression', 'space'] and - isinstance(multiple_invalid.errors[0], CoerceInvalid)): + if multiple_invalid.path == [ + "nuisance_corrections", + "2-nuisance_regression", + "space", + ] and isinstance(multiple_invalid.errors[0], CoerceInvalid): raise CoerceInvalid( 'Nusiance regression space is not forkable. Please choose ' f'only one of {valid_options["space"]}', - path=multiple_invalid.path) from multiple_invalid + path=multiple_invalid.path, + ) from multiple_invalid raise multiple_invalid try: - if (partially_validated['registration_workflows'][ - 'functional_registration' - ]['func_registration_to_template']['apply_transform'][ - 'using' - ] == 'single_step_resampling_from_stc'): - or_else = ('or choose a different option for ' - '``registration_workflows: functional_registration: ' - 'func_registration_to_template: apply_transform: ' - 'using``') - if True in partially_validated['nuisance_corrections'][ - '2-nuisance_regression']['run'] and partially_validated[ - 'nuisance_corrections' - ]['2-nuisance_regression']['space'] != 'template': + if ( + partially_validated["registration_workflows"]["functional_registration"][ + "func_registration_to_template" + ]["apply_transform"]["using"] + == "single_step_resampling_from_stc" + ): + or_else = ( + "or choose a different option for " + "``registration_workflows: functional_registration: " + "func_registration_to_template: apply_transform: " + "using``" + ) + if ( + True + in partially_validated["nuisance_corrections"]["2-nuisance_regression"][ + "run" + ] + and partially_validated["nuisance_corrections"][ + "2-nuisance_regression" + ]["space"] + != "template" + ): raise ExclusiveInvalid( - '``single_step_resampling_from_stc`` requires ' - 'template-space nuisance regression. Either set ' - '``nuisance_corrections: 2-nuisance_regression: space`` ' - f'to ``template`` {or_else}') - if any(registration != 'ANTS' for registration in - partially_validated['registration_workflows'][ - 'anatomical_registration']['registration']['using']): + "``single_step_resampling_from_stc`` requires " + "template-space nuisance regression. 
Either set " + "``nuisance_corrections: 2-nuisance_regression: space`` " + f"to ``template`` {or_else}" + ) + if any( + registration != "ANTS" + for registration in partially_validated["registration_workflows"][ + "anatomical_registration" + ]["registration"]["using"] + ): raise ExclusiveInvalid( - '``single_step_resampling_from_stc`` requires ' - 'ANTS registration. Either set ' - '``registration_workflows: anatomical_registration: ' - f'registration: using`` to ``ANTS`` {or_else}') + "``single_step_resampling_from_stc`` requires " + "ANTS registration. Either set " + "``registration_workflows: anatomical_registration: " + f"registration: using`` to ``ANTS`` {or_else}" + ) except KeyError: pass try: - motion_filters = partially_validated['functional_preproc'][ - 'motion_estimates_and_correction']['motion_estimate_filter'] - if True in motion_filters['run']: - for motion_filter in motion_filters['filters']: - motion_filter['Name'] = name_motion_filter( - motion_filter, motion_filters['filters']) + motion_filters = partially_validated["functional_preproc"][ + "motion_estimates_and_correction" + ]["motion_estimate_filter"] + if True in motion_filters["run"]: + for motion_filter in motion_filters["filters"]: + motion_filter["Name"] = name_motion_filter( + motion_filter, motion_filters["filters"] + ) else: - motion_filters['filters'] = [] + motion_filters["filters"] = [] except KeyError: pass try: # 'motion_correction.using' is only optional if 'run' is Off - mec = partially_validated['functional_preproc'][ - 'motion_estimates_and_correction'] - if mec['run']: + mec = partially_validated["functional_preproc"][ + "motion_estimates_and_correction" + ] + if mec["run"]: try: # max should be len(valid_options['motion_correction']) # once #1935 is resolved - Length(min=1, max=1)(mec['motion_correction']['using']) + Length(min=1, max=1)(mec["motion_correction"]["using"]) except LengthInvalid: - mec_path = ['functional_preproc', - 'motion_estimates_and_correction'] + mec_path = ["functional_preproc", "motion_estimates_and_correction"] raise LengthInvalid( # pylint: disable=raise-missing-from f'If data[{"][".join(map(repr, mec_path))}][\'run\'] is ' # length must be between 1 and # len(valid_options['motion_correction']) once #1935 is # resolved 'True, length of list must be exactly 1', - path=[*mec_path, 'motion_correction', 'using']) + path=[*mec_path, "motion_correction", "using"], + ) except KeyError: pass try: # Check for mutually exclusive options - if (partially_validated['nuisance_corrections'][ - '2-nuisance_regression']['ingress_regressors']['run'] and - partially_validated['nuisance_corrections'][ - '2-nuisance_regression']['create_regressors']): + if ( + partially_validated["nuisance_corrections"]["2-nuisance_regression"][ + "ingress_regressors" + ]["run"] + and partially_validated["nuisance_corrections"]["2-nuisance_regression"][ + "create_regressors" + ] + ): raise ExclusiveInvalid( "[!] Ingress_regressors and create_regressors can't both run! 
" - " Try turning one option off.\n ") + " Try turning one option off.\n " + ) except KeyError: pass try: - if not partially_validated.get("skip env check" - ) and 'unet' in [using.lower() for using in - partially_validated['anatomical_preproc'][ - 'brain_extraction']['using']]: + if not partially_validated.get("skip env check") and "unet" in [ + using.lower() + for using in partially_validated["anatomical_preproc"]["brain_extraction"][ + "using" + ] + ]: try: from importlib import import_module - import_module('CPAC.unet') - except (CalledProcessError, ImportError, ModuleNotFoundError, OSError) as error: + + import_module("CPAC.unet") + except ( + CalledProcessError, + ImportError, + ModuleNotFoundError, + OSError, + ) as error: import site + raise OSError( - 'U-Net brain extraction requires torch to be installed, ' - 'but the installation path in this container is ' - 'read-only. Please bind a local writable path to ' + "U-Net brain extraction requires torch to be installed, " + "but the installation path in this container is " + "read-only. Please bind a local writable path to " f'"{site.USER_BASE}" in the container to use U-Net.' ) from error except KeyError: diff --git a/CPAC/pipeline/test/test_engine.py b/CPAC/pipeline/test/test_engine.py index 3988a61f95..c228fc3640 100644 --- a/CPAC/pipeline/test/test_engine.py +++ b/CPAC/pipeline/test/test_engine.py @@ -1,98 +1,92 @@ import os + import pytest -from CPAC.pipeline.cpac_pipeline import initialize_nipype_wf, \ - load_cpac_pipe_config, \ - connect_pipeline, \ - build_anat_preproc_stack, \ - build_workflow -from CPAC.pipeline.engine import ResourcePool, ingress_raw_anat_data, \ - ingress_raw_func_data, \ - ingress_pipeconfig_paths, initiate_rpool + +from CPAC.pipeline.cpac_pipeline import ( + build_anat_preproc_stack, + build_workflow, + connect_pipeline, + initialize_nipype_wf, + load_cpac_pipe_config, +) +from CPAC.pipeline.engine import ( + ingress_pipeconfig_paths, + ingress_raw_anat_data, + ingress_raw_func_data, + initiate_rpool, + ResourcePool, +) from CPAC.utils.bids_utils import create_cpac_data_config -@pytest.mark.skip(reason='not a pytest test') +@pytest.mark.skip(reason="not a pytest test") def test_ingress_func_raw_data(pipe_config, bids_dir, test_dir): - - sub_data_dct = create_cpac_data_config(bids_dir, - skip_bids_validator=True)[0] + sub_data_dct = create_cpac_data_config(bids_dir, skip_bids_validator=True)[0] cfg = load_cpac_pipe_config(pipe_config) - cfg.pipeline_setup['output_directory']['path'] = \ - os.path.join(test_dir, 'out') - cfg.pipeline_setup['working_directory']['path'] = \ - os.path.join(test_dir, 'work') + cfg.pipeline_setup["output_directory"]["path"] = os.path.join(test_dir, "out") + cfg.pipeline_setup["working_directory"]["path"] = os.path.join(test_dir, "work") wf = initialize_nipype_wf(cfg, sub_data_dct) - part_id = sub_data_dct['subject_id'] - ses_id = sub_data_dct['unique_id'] + part_id = sub_data_dct["subject_id"] + ses_id = sub_data_dct["unique_id"] - unique_id = f'{part_id}_{ses_id}' + unique_id = f"{part_id}_{ses_id}" rpool = ResourcePool(name=unique_id, cfg=cfg) - if 'func' in sub_data_dct: - wf, rpool, diff, blip, fmap_rp_list = \ - ingress_raw_func_data(wf, rpool, cfg, sub_data_dct, unique_id, - part_id, ses_id) + if "func" in sub_data_dct: + wf, rpool, diff, blip, fmap_rp_list = ingress_raw_func_data( + wf, rpool, cfg, sub_data_dct, unique_id, part_id, ses_id + ) rpool.gather_pipes(wf, cfg, all=True) wf.run() -@pytest.mark.skip(reason='not a pytest test') +@pytest.mark.skip(reason="not a 
pytest test") def test_ingress_anat_raw_data(pipe_config, bids_dir, test_dir): - - sub_data_dct = create_cpac_data_config(bids_dir, - skip_bids_validator=True)[0] + sub_data_dct = create_cpac_data_config(bids_dir, skip_bids_validator=True)[0] cfg = load_cpac_pipe_config(pipe_config) - cfg.pipeline_setup['output_directory']['path'] = \ - os.path.join(test_dir, 'out') - cfg.pipeline_setup['working_directory']['path'] = \ - os.path.join(test_dir, 'work') + cfg.pipeline_setup["output_directory"]["path"] = os.path.join(test_dir, "out") + cfg.pipeline_setup["working_directory"]["path"] = os.path.join(test_dir, "work") wf = initialize_nipype_wf(cfg, sub_data_dct) - part_id = sub_data_dct['subject_id'] - ses_id = sub_data_dct['unique_id'] + part_id = sub_data_dct["subject_id"] + ses_id = sub_data_dct["unique_id"] - unique_id = f'{part_id}_{ses_id}' + unique_id = f"{part_id}_{ses_id}" rpool = ResourcePool(name=unique_id, cfg=cfg) - rpool = ingress_raw_anat_data(wf, rpool, cfg, - sub_data_dct, - unique_id, - part_id, ses_id) + rpool = ingress_raw_anat_data( + wf, rpool, cfg, sub_data_dct, unique_id, part_id, ses_id + ) rpool.gather_pipes(wf, cfg, all=True) wf.run() -@pytest.mark.skip(reason='not a pytest test') +@pytest.mark.skip(reason="not a pytest test") def test_ingress_pipeconfig_data(pipe_config, bids_dir, test_dir): - - sub_data_dct = create_cpac_data_config(bids_dir, - skip_bids_validator=True)[0] + sub_data_dct = create_cpac_data_config(bids_dir, skip_bids_validator=True)[0] cfg = load_cpac_pipe_config(pipe_config) - cfg.pipeline_setup['output_directory']['path'] = \ - os.path.join(test_dir, 'out') - cfg.pipeline_setup['working_directory']['path'] = \ - os.path.join(test_dir, 'work') - cfg.pipeline_setup['log_directory']['path'] = \ - os.path.join(test_dir, 'logs') + cfg.pipeline_setup["output_directory"]["path"] = os.path.join(test_dir, "out") + cfg.pipeline_setup["working_directory"]["path"] = os.path.join(test_dir, "work") + cfg.pipeline_setup["log_directory"]["path"] = os.path.join(test_dir, "logs") wf = initialize_nipype_wf(cfg, sub_data_dct) - part_id = sub_data_dct['subject_id'] - ses_id = sub_data_dct['unique_id'] + part_id = sub_data_dct["subject_id"] + ses_id = sub_data_dct["unique_id"] - unique_id = f'{part_id}_{ses_id}' + unique_id = f"{part_id}_{ses_id}" rpool = ResourcePool(name=unique_id, cfg=cfg) @@ -103,19 +97,14 @@ def test_ingress_pipeconfig_data(pipe_config, bids_dir, test_dir): wf.run() -@pytest.mark.skip(reason='not a pytest test') +@pytest.mark.skip(reason="not a pytest test") def test_build_anat_preproc_stack(pipe_config, bids_dir, test_dir): - - sub_data_dct = create_cpac_data_config(bids_dir, - skip_bids_validator=True)[0] + sub_data_dct = create_cpac_data_config(bids_dir, skip_bids_validator=True)[0] cfg = load_cpac_pipe_config(pipe_config) - cfg.pipeline_setup['output_directory']['path'] = \ - os.path.join(test_dir, 'out') - cfg.pipeline_setup['working_directory']['path'] = \ - os.path.join(test_dir, 'work') - cfg.pipeline_setup['log_directory']['path'] = \ - os.path.join(test_dir, 'logs') + cfg.pipeline_setup["output_directory"]["path"] = os.path.join(test_dir, "out") + cfg.pipeline_setup["working_directory"]["path"] = os.path.join(test_dir, "work") + cfg.pipeline_setup["log_directory"]["path"] = os.path.join(test_dir, "logs") wf = initialize_nipype_wf(cfg, sub_data_dct) @@ -127,44 +116,39 @@ def test_build_anat_preproc_stack(pipe_config, bids_dir, test_dir): rpool.gather_pipes(wf, cfg) wf.run() - -@pytest.mark.skip(reason='not a pytest test') + 
+@pytest.mark.skip(reason="not a pytest test") def test_build_workflow(pipe_config, bids_dir, test_dir): - - sub_data_dct = create_cpac_data_config(bids_dir, - skip_bids_validator=True)[0] + sub_data_dct = create_cpac_data_config(bids_dir, skip_bids_validator=True)[0] cfg = load_cpac_pipe_config(pipe_config) - cfg.pipeline_setup['output_directory']['path'] = \ - os.path.join(test_dir, 'out') - cfg.pipeline_setup['working_directory']['path'] = \ - os.path.join(test_dir, 'work') - cfg.pipeline_setup['log_directory']['path'] = \ - os.path.join(test_dir, 'logs') + cfg.pipeline_setup["output_directory"]["path"] = os.path.join(test_dir, "out") + cfg.pipeline_setup["working_directory"]["path"] = os.path.join(test_dir, "work") + cfg.pipeline_setup["log_directory"]["path"] = os.path.join(test_dir, "logs") wf = initialize_nipype_wf(cfg, sub_data_dct) wf, rpool = initiate_rpool(wf, cfg, sub_data_dct) - wf, _, _ = build_workflow( - sub_data_dct['subject_id'], sub_data_dct, cfg) + wf, _, _ = build_workflow(sub_data_dct["subject_id"], sub_data_dct, cfg) rpool.gather_pipes(wf, cfg) wf.run() + # bids_dir = "/Users/steven.giavasis/data/HBN-SI_dataset/rawdata" # test_dir = "/test_dir" # cfg = "/Users/hecheng.jin/GitHub/DevBranch/CPAC/resources/configs/pipeline_config_monkey-ABCD.yml" cfg = "/Users/hecheng.jin/GitHub/pipeline_config_monkey-ABCDlocal.yml" -bids_dir = '/Users/hecheng.jin/Monkey/monkey_data_oxford/site-ucdavis' +bids_dir = "/Users/hecheng.jin/Monkey/monkey_data_oxford/site-ucdavis" test_dir = "/Users/hecheng.jin/GitHub/Test/T2preproc" # test_ingress_func_raw_data(cfg, bids_dir, test_dir) # test_ingress_anat_raw_data(cfg, bids_dir, test_dir) # test_ingress_pipeconfig_data(cfg, bids_dir, test_dir) # test_build_anat_preproc_stack(cfg, bids_dir, test_dir) -if __name__ == '__main__': +if __name__ == "__main__": test_build_workflow(cfg, bids_dir, test_dir) diff --git a/CPAC/registration/registration.py b/CPAC/registration/registration.py index 5a957bda0d..7410b335f2 100644 --- a/CPAC/registration/registration.py +++ b/CPAC/registration/registration.py @@ -16,48 +16,58 @@ # License along with C-PAC. If not, see . 
# pylint: disable=too-many-lines,ungrouped-imports,wrong-import-order from typing import Optional -from CPAC.pipeline import nipype_pipeline_engine as pe -from CPAC.pipeline.nodeblock import nodeblock + from nipype.interfaces import afni, ants, c3, fsl, utility as util from nipype.interfaces.afni import utils as afni_utils from CPAC.anat_preproc.lesion_preproc import create_lesion_preproc from CPAC.func_preproc.utils import chunk_ts, split_ts_chunks -from CPAC.registration.utils import seperate_warps_list, \ - check_transforms, \ - generate_inverse_transform_flags, \ - single_ants_xfm_to_list, \ - interpolation_string, \ - change_itk_transform_type, \ - hardcoded_reg, \ - one_d_to_mat, \ - run_c3d, \ - run_c4d +from CPAC.pipeline import nipype_pipeline_engine as pe +from CPAC.pipeline.nodeblock import nodeblock +from CPAC.registration.utils import ( + change_itk_transform_type, + check_transforms, + generate_inverse_transform_flags, + hardcoded_reg, + interpolation_string, + one_d_to_mat, + run_c3d, + run_c4d, + seperate_warps_list, + single_ants_xfm_to_list, +) from CPAC.utils.interfaces.fsl import Merge as fslMerge from CPAC.utils.typing import LIST_OR_STR, TUPLE from CPAC.utils.utils import check_prov_for_motion_tool, check_prov_for_regtool -def apply_transform(wf_name, reg_tool, time_series=False, multi_input=False, - num_cpus=1, num_ants_cores=1): - +def apply_transform( + wf_name, + reg_tool, + time_series=False, + multi_input=False, + num_cpus=1, + num_ants_cores=1, +): if not reg_tool: - raise Exception("\n[!] Developer info: the 'reg_tool' parameter sent " - f"to the 'apply_transform' node for '{wf_name}' is " - f"empty.\n") + raise Exception( + "\n[!] Developer info: the 'reg_tool' parameter sent " + f"to the 'apply_transform' node for '{wf_name}' is " + f"empty.\n" + ) wf = pe.Workflow(name=wf_name) inputNode = pe.Node( - util.IdentityInterface(fields=['input_image', - 'reference', - 'transform', - 'interpolation']), - name='inputspec') + util.IdentityInterface( + fields=["input_image", "reference", "transform", "interpolation"] + ), + name="inputspec", + ) outputNode = pe.Node( - util.IdentityInterface(fields=['output_image']), - name='outputspec') + util.IdentityInterface(fields=["output_image"]), name="outputspec" + ) if int(num_cpus) > 1 and time_series: # parallelize time series warp application @@ -65,23 +75,22 @@ def apply_transform(wf_name, reg_tool, time_series=False, multi_input=False, # time series chunks multi_input = True - if reg_tool == 'ants': - + if reg_tool == "ants": if multi_input: - apply_warp = pe.MapNode(interface=ants.ApplyTransforms(), - name=f'apply_warp_{wf_name}', - iterfield=['input_image'], - mem_gb=0.7, - mem_x=(1708448960473801 / - 151115727451828646838272, - 'input_image')) + apply_warp = pe.MapNode( + interface=ants.ApplyTransforms(), + name=f"apply_warp_{wf_name}", + iterfield=["input_image"], + mem_gb=0.7, + mem_x=(1708448960473801 / 151115727451828646838272, "input_image"), + ) else: - apply_warp = pe.Node(interface=ants.ApplyTransforms(), - name=f'apply_warp_{wf_name}', - mem_gb=0.7, - mem_x=(1708448960473801 / - 151115727451828646838272, - 'input_image')) + apply_warp = pe.Node( + interface=ants.ApplyTransforms(), + name=f"apply_warp_{wf_name}", + mem_gb=0.7, + mem_x=(1708448960473801 / 151115727451828646838272, "input_image"), + ) apply_warp.inputs.dimension = 3 apply_warp.interface.num_threads = int(num_ants_cores) @@ -89,275 +98,312 @@ def apply_transform(wf_name, reg_tool, time_series=False, multi_input=False, if time_series: 
apply_warp.inputs.input_image_type = 3 - wf.connect(inputNode, 'reference', apply_warp, 'reference_image') + wf.connect(inputNode, "reference", apply_warp, "reference_image") - interp_string = pe.Node(util.Function(input_names=['interpolation', - 'reg_tool'], - output_names=['interpolation'], - function=interpolation_string), - name=f'interp_string', - mem_gb=2.5) + interp_string = pe.Node( + util.Function( + input_names=["interpolation", "reg_tool"], + output_names=["interpolation"], + function=interpolation_string, + ), + name="interp_string", + mem_gb=2.5, + ) interp_string.inputs.reg_tool = reg_tool - wf.connect(inputNode, 'interpolation', interp_string, 'interpolation') - wf.connect(interp_string, 'interpolation', - apply_warp, 'interpolation') - - ants_xfm_list = \ - pe.Node(util.Function(input_names=['transform'], - output_names=['transform_list'], - function=single_ants_xfm_to_list), - name=f'single_ants_xfm_to_list', - mem_gb=2.5) + wf.connect(inputNode, "interpolation", interp_string, "interpolation") + wf.connect(interp_string, "interpolation", apply_warp, "interpolation") + + ants_xfm_list = pe.Node( + util.Function( + input_names=["transform"], + output_names=["transform_list"], + function=single_ants_xfm_to_list, + ), + name="single_ants_xfm_to_list", + mem_gb=2.5, + ) - wf.connect(inputNode, 'transform', ants_xfm_list, 'transform') - wf.connect(ants_xfm_list, 'transform_list', apply_warp, 'transforms') + wf.connect(inputNode, "transform", ants_xfm_list, "transform") + wf.connect(ants_xfm_list, "transform_list", apply_warp, "transforms") # parallelize the apply warp, if multiple CPUs, and it's a time # series! if int(num_cpus) > 1 and time_series: + chunk_imports = ["import nibabel as nb"] + chunk = pe.Node( + util.Function( + input_names=["func_file", "n_chunks", "chunk_size"], + output_names=["TR_ranges"], + function=chunk_ts, + imports=chunk_imports, + ), + name=f"chunk_{wf_name}", + mem_gb=2.5, + ) - chunk_imports = ['import nibabel as nb'] - chunk = pe.Node(util.Function(input_names=['func_file', - 'n_chunks', - 'chunk_size'], - output_names=['TR_ranges'], - function=chunk_ts, - imports=chunk_imports), - name=f'chunk_{wf_name}', - mem_gb=2.5) - - #chunk.inputs.n_chunks = int(num_cpus) + # chunk.inputs.n_chunks = int(num_cpus) # 10-TR sized chunks chunk.inputs.chunk_size = 10 - wf.connect(inputNode, 'input_image', chunk, 'func_file') - - split_imports = ['import os', 'import subprocess'] - split = pe.Node(util.Function(input_names=['func_file', - 'tr_ranges'], - output_names=['split_funcs'], - function=split_ts_chunks, - imports=split_imports), - name=f'split_{wf_name}', - mem_gb=2.5) + wf.connect(inputNode, "input_image", chunk, "func_file") + + split_imports = ["import os", "import subprocess"] + split = pe.Node( + util.Function( + input_names=["func_file", "tr_ranges"], + output_names=["split_funcs"], + function=split_ts_chunks, + imports=split_imports, + ), + name=f"split_{wf_name}", + mem_gb=2.5, + ) - wf.connect(inputNode, 'input_image', split, 'func_file') - wf.connect(chunk, 'TR_ranges', split, 'tr_ranges') + wf.connect(inputNode, "input_image", split, "func_file") + wf.connect(chunk, "TR_ranges", split, "tr_ranges") - wf.connect(split, 'split_funcs', apply_warp, 'input_image') + wf.connect(split, "split_funcs", apply_warp, "input_image") - func_concat = pe.Node(interface=afni_utils.TCat(), - name=f'func_concat_{wf_name}', - mem_gb=2.5) - func_concat.inputs.outputtype = 'NIFTI_GZ' + func_concat = pe.Node( + interface=afni_utils.TCat(), 
name=f"func_concat_{wf_name}", mem_gb=2.5 + ) + func_concat.inputs.outputtype = "NIFTI_GZ" - wf.connect(apply_warp, 'output_image', func_concat, 'in_files') + wf.connect(apply_warp, "output_image", func_concat, "in_files") - wf.connect(func_concat, 'out_file', outputNode, 'output_image') + wf.connect(func_concat, "out_file", outputNode, "output_image") else: - wf.connect(inputNode, 'input_image', apply_warp, 'input_image') - wf.connect(apply_warp, 'output_image', outputNode, 'output_image') - - elif reg_tool == 'fsl': + wf.connect(inputNode, "input_image", apply_warp, "input_image") + wf.connect(apply_warp, "output_image", outputNode, "output_image") + elif reg_tool == "fsl": if multi_input: - apply_warp = pe.MapNode(interface=fsl.ApplyWarp(), - name=f'fsl_apply_warp', - iterfield=['in_file'], - mem_gb=2.5) + apply_warp = pe.MapNode( + interface=fsl.ApplyWarp(), + name="fsl_apply_warp", + iterfield=["in_file"], + mem_gb=2.5, + ) else: - apply_warp = pe.Node(interface=fsl.ApplyWarp(), - name='fsl_apply_warp', - mem_gb=2.5) - - interp_string = pe.Node(util.Function(input_names=['interpolation', - 'reg_tool'], - output_names=['interpolation'], - function=interpolation_string), - name=f'interp_string', - mem_gb=2.5) + apply_warp = pe.Node( + interface=fsl.ApplyWarp(), name="fsl_apply_warp", mem_gb=2.5 + ) + + interp_string = pe.Node( + util.Function( + input_names=["interpolation", "reg_tool"], + output_names=["interpolation"], + function=interpolation_string, + ), + name="interp_string", + mem_gb=2.5, + ) interp_string.inputs.reg_tool = reg_tool - wf.connect(inputNode, 'interpolation', interp_string, 'interpolation') - wf.connect(interp_string, 'interpolation', apply_warp, 'interp') + wf.connect(inputNode, "interpolation", interp_string, "interpolation") + wf.connect(interp_string, "interpolation", apply_warp, "interp") # mni to t1 - wf.connect(inputNode, 'reference', apply_warp, 'ref_file') + wf.connect(inputNode, "reference", apply_warp, "ref_file") # NOTE: C-PAC now converts all FSL xfm's to .nii, so even if the # inputNode 'transform' is a linear xfm, it's a .nii and must # go in as a warpfield file - wf.connect(inputNode, 'transform', apply_warp, 'field_file') + wf.connect(inputNode, "transform", apply_warp, "field_file") # parallelize the apply warp, if multiple CPUs, and it's a time # series! 
if int(num_cpus) > 1 and time_series: + chunk_imports = ["import nibabel as nb"] + chunk = pe.Node( + util.Function( + input_names=["func_file", "n_chunks", "chunk_size"], + output_names=["TR_ranges"], + function=chunk_ts, + imports=chunk_imports, + ), + name=f"chunk_{wf_name}", + mem_gb=2.5, + ) - chunk_imports = ['import nibabel as nb'] - chunk = pe.Node(util.Function(input_names=['func_file', - 'n_chunks', - 'chunk_size'], - output_names=['TR_ranges'], - function=chunk_ts, - imports=chunk_imports), - name=f'chunk_{wf_name}', - mem_gb=2.5) - - #chunk.inputs.n_chunks = int(num_cpus) + # chunk.inputs.n_chunks = int(num_cpus) # 10-TR sized chunks chunk.inputs.chunk_size = 10 - wf.connect(inputNode, 'input_image', chunk, 'func_file') - - split_imports = ['import os', 'import subprocess'] - split = pe.Node(util.Function(input_names=['func_file', - 'tr_ranges'], - output_names=['split_funcs'], - function=split_ts_chunks, - imports=split_imports), - name=f'split_{wf_name}', - mem_gb=2.5) + wf.connect(inputNode, "input_image", chunk, "func_file") + + split_imports = ["import os", "import subprocess"] + split = pe.Node( + util.Function( + input_names=["func_file", "tr_ranges"], + output_names=["split_funcs"], + function=split_ts_chunks, + imports=split_imports, + ), + name=f"split_{wf_name}", + mem_gb=2.5, + ) - wf.connect(inputNode, 'input_image', split, 'func_file') - wf.connect(chunk, 'TR_ranges', split, 'tr_ranges') + wf.connect(inputNode, "input_image", split, "func_file") + wf.connect(chunk, "TR_ranges", split, "tr_ranges") - wf.connect(split, 'split_funcs', apply_warp, 'in_file') + wf.connect(split, "split_funcs", apply_warp, "in_file") - func_concat = pe.Node(interface=afni_utils.TCat(), - name=f'func_concat{wf_name}') - func_concat.inputs.outputtype = 'NIFTI_GZ' + func_concat = pe.Node( + interface=afni_utils.TCat(), name=f"func_concat{wf_name}" + ) + func_concat.inputs.outputtype = "NIFTI_GZ" - wf.connect(apply_warp, 'out_file', func_concat, 'in_files') + wf.connect(apply_warp, "out_file", func_concat, "in_files") - wf.connect(func_concat, 'out_file', outputNode, 'output_image') + wf.connect(func_concat, "out_file", outputNode, "output_image") else: - wf.connect(inputNode, 'input_image', apply_warp, 'in_file') - wf.connect(apply_warp, 'out_file', outputNode, 'output_image') + wf.connect(inputNode, "input_image", apply_warp, "in_file") + wf.connect(apply_warp, "out_file", outputNode, "output_image") return wf -def transform_derivative(wf_name, label, reg_tool, num_cpus, num_ants_cores, - ants_interp=None, fsl_interp=None, opt=None): - '''Transform output derivatives to template space. +def transform_derivative( + wf_name, + label, + reg_tool, + num_cpus, + num_ants_cores, + ants_interp=None, + fsl_interp=None, + opt=None, +): + """Transform output derivatives to template space. This function is designed for use with the NodeBlock connection engine. 
- ''' - + """ wf = pe.Workflow(name=wf_name) - inputnode = pe.Node(util.IdentityInterface(fields=['in_file', - 'reference', - 'transform']), - name='inputspec') + inputnode = pe.Node( + util.IdentityInterface(fields=["in_file", "reference", "transform"]), + name="inputspec", + ) multi_input = False - if 'statmap' in label: + if "statmap" in label: multi_input = True stack = False - if 'correlations' in label: + if "correlations" in label: stack = True - apply_xfm = apply_transform(f'warp_{label}_to_template', reg_tool, - time_series=stack, - multi_input=multi_input, - num_cpus=num_cpus, - num_ants_cores=num_ants_cores) + apply_xfm = apply_transform( + f"warp_{label}_to_template", + reg_tool, + time_series=stack, + multi_input=multi_input, + num_cpus=num_cpus, + num_ants_cores=num_ants_cores, + ) - if reg_tool == 'ants': + if reg_tool == "ants": apply_xfm.inputs.inputspec.interpolation = ants_interp - elif reg_tool == 'fsl': + elif reg_tool == "fsl": apply_xfm.inputs.inputspec.interpolation = fsl_interp - wf.connect(inputnode, 'in_file', apply_xfm, 'inputspec.input_image') - wf.connect(inputnode, 'reference', apply_xfm, 'inputspec.reference') - wf.connect(inputnode, 'transform', apply_xfm, 'inputspec.transform') + wf.connect(inputnode, "in_file", apply_xfm, "inputspec.input_image") + wf.connect(inputnode, "reference", apply_xfm, "inputspec.reference") + wf.connect(inputnode, "transform", apply_xfm, "inputspec.transform") - outputnode = pe.Node(util.IdentityInterface(fields=['out_file']), - name='outputspec') + outputnode = pe.Node(util.IdentityInterface(fields=["out_file"]), name="outputspec") - wf.connect(apply_xfm, 'outputspec.output_image', outputnode, 'out_file') + wf.connect(apply_xfm, "outputspec.output_image", outputnode, "out_file") return wf -def convert_pedir(pedir, convert='xyz_to_int'): - '''FSL Flirt requires pedir input encoded as an int''' - if convert == 'xyz_to_int': - conv_dct = {'x': 1, 'y': 2, 'z': 3, 'x-': -1, 'y-': -2, 'z-': -3, - 'i': 1, 'j': 2, 'k': 3, 'i-': -1, 'j-': -2, 'k-': -3, - '-x': -1, '-i': -1, '-y': -2, - '-j': -2, '-z': -3, '-k': -3} - elif convert == 'ijk_to_xyz': - conv_dct = {'i': 'x', 'j': 'y', 'k': 'z', - 'i-': 'x-', 'j-': 'y-', 'k-': 'z-'} +def convert_pedir(pedir, convert="xyz_to_int"): + """FSL Flirt requires pedir input encoded as an int""" + if convert == "xyz_to_int": + conv_dct = { + "x": 1, + "y": 2, + "z": 3, + "x-": -1, + "y-": -2, + "z-": -3, + "i": 1, + "j": 2, + "k": 3, + "i-": -1, + "j-": -2, + "k-": -3, + "-x": -1, + "-i": -1, + "-y": -2, + "-j": -2, + "-z": -3, + "-k": -3, + } + elif convert == "ijk_to_xyz": + conv_dct = {"i": "x", "j": "y", "k": "z", "i-": "x-", "j-": "y-", "k-": "z-"} if isinstance(pedir, bytes): pedir = pedir.decode() if not isinstance(pedir, str): - raise Exception("\n\nPhase-encoding direction must be a " - "string value.\n\nValue: {0}" - "\n\n".format(pedir)) + raise Exception( + "\n\nPhase-encoding direction must be a " + f"string value.\n\nValue: {pedir}" + "\n\n" + ) if pedir not in conv_dct.keys(): - raise Exception("\n\nInvalid phase-encoding direction " - "entered: {0}\n\n".format(pedir)) + raise Exception("\n\nInvalid phase-encoding direction " f"entered: {pedir}\n\n") pedir = conv_dct[pedir] return pedir -def create_fsl_flirt_linear_reg(name='fsl_flirt_linear_reg'): - +def create_fsl_flirt_linear_reg(name="fsl_flirt_linear_reg"): linear_register = pe.Workflow(name=name) - inputspec = pe.Node(util.IdentityInterface(fields=['input_brain', - 'reference_brain', - 'interp', - 'ref_mask']), - name='inputspec') 
+ inputspec = pe.Node( + util.IdentityInterface( + fields=["input_brain", "reference_brain", "interp", "ref_mask"] + ), + name="inputspec", + ) - outputspec = pe.Node(util.IdentityInterface(fields=['output_brain', - 'linear_xfm', - 'invlinear_xfm']), - name='outputspec') + outputspec = pe.Node( + util.IdentityInterface(fields=["output_brain", "linear_xfm", "invlinear_xfm"]), + name="outputspec", + ) - linear_reg = pe.Node(interface=fsl.FLIRT(), name='linear_reg_0') - linear_reg.inputs.cost = 'corratio' + linear_reg = pe.Node(interface=fsl.FLIRT(), name="linear_reg_0") + linear_reg.inputs.cost = "corratio" - inv_flirt_xfm = pe.Node(interface=fsl.utils.ConvertXFM(), - name='inv_linear_reg0_xfm') + inv_flirt_xfm = pe.Node( + interface=fsl.utils.ConvertXFM(), name="inv_linear_reg0_xfm" + ) inv_flirt_xfm.inputs.invert_xfm = True - linear_register.connect(inputspec, 'input_brain', - linear_reg, 'in_file') + linear_register.connect(inputspec, "input_brain", linear_reg, "in_file") - linear_register.connect(inputspec, 'reference_brain', - linear_reg, 'reference') + linear_register.connect(inputspec, "reference_brain", linear_reg, "reference") - linear_register.connect(inputspec, 'interp', - linear_reg, 'interp') + linear_register.connect(inputspec, "interp", linear_reg, "interp") - linear_register.connect(linear_reg, 'out_file', - outputspec, 'output_brain') + linear_register.connect(linear_reg, "out_file", outputspec, "output_brain") - linear_register.connect(linear_reg, 'out_matrix_file', - inv_flirt_xfm, 'in_file') + linear_register.connect(linear_reg, "out_matrix_file", inv_flirt_xfm, "in_file") - linear_register.connect(inv_flirt_xfm, 'out_file', - outputspec, 'invlinear_xfm') + linear_register.connect(inv_flirt_xfm, "out_file", outputspec, "invlinear_xfm") - linear_register.connect(linear_reg, 'out_matrix_file', - outputspec, 'linear_xfm') + linear_register.connect(linear_reg, "out_matrix_file", outputspec, "linear_xfm") return linear_register -def create_fsl_fnirt_nonlinear_reg(name='fsl_fnirt_nonlinear_reg'): +def create_fsl_fnirt_nonlinear_reg(name="fsl_fnirt_nonlinear_reg"): """ Performs non-linear registration of an input file to a reference file using FSL FNIRT. @@ -373,7 +419,6 @@ def create_fsl_fnirt_nonlinear_reg(name='fsl_fnirt_nonlinear_reg'): Notes ----- - Workflow Inputs:: inputspec.input_skull : string (nifti file) @@ -408,71 +453,68 @@ def create_fsl_fnirt_nonlinear_reg(name='fsl_fnirt_nonlinear_reg'): .. 
image:: ../images/nonlinear_register_detailed.dot.png :width: 500 """ - nonlinear_register = pe.Workflow(name=name) - inputspec = pe.Node(util.IdentityInterface(fields=['input_brain', - 'input_skull', - 'reference_brain', - 'reference_skull', - 'interp', - 'ref_mask', - 'linear_aff', - 'fnirt_config']), - name='inputspec') + inputspec = pe.Node( + util.IdentityInterface( + fields=[ + "input_brain", + "input_skull", + "reference_brain", + "reference_skull", + "interp", + "ref_mask", + "linear_aff", + "fnirt_config", + ] + ), + name="inputspec", + ) - outputspec = pe.Node(util.IdentityInterface(fields=['output_brain', - 'nonlinear_xfm']), - name='outputspec') + outputspec = pe.Node( + util.IdentityInterface(fields=["output_brain", "nonlinear_xfm"]), + name="outputspec", + ) - nonlinear_reg = pe.Node(interface=fsl.FNIRT(), - name='nonlinear_reg_1') + nonlinear_reg = pe.Node(interface=fsl.FNIRT(), name="nonlinear_reg_1") nonlinear_reg.inputs.fieldcoeff_file = True nonlinear_reg.inputs.jacobian_file = True - brain_warp = pe.Node(interface=fsl.ApplyWarp(), - name='brain_warp') + brain_warp = pe.Node(interface=fsl.ApplyWarp(), name="brain_warp") - nonlinear_register.connect(inputspec, 'input_skull', - nonlinear_reg, 'in_file') + nonlinear_register.connect(inputspec, "input_skull", nonlinear_reg, "in_file") - nonlinear_register.connect(inputspec, 'reference_skull', - nonlinear_reg, 'ref_file') + nonlinear_register.connect(inputspec, "reference_skull", nonlinear_reg, "ref_file") - nonlinear_register.connect(inputspec, 'interp', - brain_warp, 'interp') + nonlinear_register.connect(inputspec, "interp", brain_warp, "interp") - nonlinear_register.connect(inputspec, 'ref_mask', - nonlinear_reg, 'refmask_file') + nonlinear_register.connect(inputspec, "ref_mask", nonlinear_reg, "refmask_file") # FNIRT parameters are specified by FSL config file # ${FSLDIR}/etc/flirtsch/TI_2_MNI152_2mm.cnf (or user-specified) - nonlinear_register.connect(inputspec, 'fnirt_config', - nonlinear_reg, 'config_file') + nonlinear_register.connect(inputspec, "fnirt_config", nonlinear_reg, "config_file") - nonlinear_register.connect(inputspec, 'linear_aff', - nonlinear_reg, 'affine_file') + nonlinear_register.connect(inputspec, "linear_aff", nonlinear_reg, "affine_file") - nonlinear_register.connect(nonlinear_reg, 'fieldcoeff_file', - outputspec, 'nonlinear_xfm') + nonlinear_register.connect( + nonlinear_reg, "fieldcoeff_file", outputspec, "nonlinear_xfm" + ) - nonlinear_register.connect(inputspec, 'input_brain', - brain_warp, 'in_file') + nonlinear_register.connect(inputspec, "input_brain", brain_warp, "in_file") - nonlinear_register.connect(nonlinear_reg, 'fieldcoeff_file', - brain_warp, 'field_file') + nonlinear_register.connect( + nonlinear_reg, "fieldcoeff_file", brain_warp, "field_file" + ) - nonlinear_register.connect(inputspec, 'reference_brain', - brain_warp, 'ref_file') + nonlinear_register.connect(inputspec, "reference_brain", brain_warp, "ref_file") - nonlinear_register.connect(brain_warp, 'out_file', - outputspec, 'output_brain') + nonlinear_register.connect(brain_warp, "out_file", outputspec, "output_brain") return nonlinear_register -def create_fsl_fnirt_nonlinear_reg_nhp(name='fsl_fnirt_nonlinear_reg_nhp'): +def create_fsl_fnirt_nonlinear_reg_nhp(name="fsl_fnirt_nonlinear_reg_nhp"): """ Performs non-linear registration of an input file to a reference file using FSL FNIRT. 
@@ -488,7 +530,6 @@ def create_fsl_fnirt_nonlinear_reg_nhp(name='fsl_fnirt_nonlinear_reg_nhp'): Notes ----- - Workflow Inputs:: inputspec.input_skull : string (nifti file) @@ -525,129 +566,120 @@ def create_fsl_fnirt_nonlinear_reg_nhp(name='fsl_fnirt_nonlinear_reg_nhp'): .. image:: ../images/nonlinear_register_detailed.dot.png :width: 500 """ - nonlinear_register = pe.Workflow(name=name) - inputspec = pe.Node(util.IdentityInterface(fields=['input_brain', - 'input_skull', - 'reference_brain', - 'reference_skull', - 'interp', - 'ref_mask', - 'linear_aff', - 'fnirt_config']), - name='inputspec') - - outputspec = pe.Node(util.IdentityInterface(fields=['output_brain', - 'output_head', - 'output_mask', - 'output_biasfield', - 'nonlinear_xfm', - 'nonlinear_warp']), - name='outputspec') - - nonlinear_reg = pe.Node(interface=fsl.FNIRT(), - name='nonlinear_reg_1') + inputspec = pe.Node( + util.IdentityInterface( + fields=[ + "input_brain", + "input_skull", + "reference_brain", + "reference_skull", + "interp", + "ref_mask", + "linear_aff", + "fnirt_config", + ] + ), + name="inputspec", + ) + + outputspec = pe.Node( + util.IdentityInterface( + fields=[ + "output_brain", + "output_head", + "output_mask", + "output_biasfield", + "nonlinear_xfm", + "nonlinear_warp", + ] + ), + name="outputspec", + ) + + nonlinear_reg = pe.Node(interface=fsl.FNIRT(), name="nonlinear_reg_1") nonlinear_reg.inputs.fieldcoeff_file = True nonlinear_reg.inputs.jacobian_file = True nonlinear_reg.inputs.field_file = True - nonlinear_register.connect(inputspec, 'input_skull', - nonlinear_reg, 'in_file') + nonlinear_register.connect(inputspec, "input_skull", nonlinear_reg, "in_file") - nonlinear_register.connect(inputspec, 'reference_skull', - nonlinear_reg, 'ref_file') + nonlinear_register.connect(inputspec, "reference_skull", nonlinear_reg, "ref_file") - nonlinear_register.connect(inputspec, 'ref_mask', - nonlinear_reg, 'refmask_file') + nonlinear_register.connect(inputspec, "ref_mask", nonlinear_reg, "refmask_file") - nonlinear_register.connect(inputspec, 'fnirt_config', - nonlinear_reg, 'config_file') + nonlinear_register.connect(inputspec, "fnirt_config", nonlinear_reg, "config_file") - nonlinear_register.connect(inputspec, 'linear_aff', - nonlinear_reg, 'affine_file') + nonlinear_register.connect(inputspec, "linear_aff", nonlinear_reg, "affine_file") - brain_warp = pe.Node(interface=fsl.ApplyWarp(), - name='brain_warp') - brain_warp.inputs.interp = 'nn' + brain_warp = pe.Node(interface=fsl.ApplyWarp(), name="brain_warp") + brain_warp.inputs.interp = "nn" brain_warp.inputs.relwarp = True - nonlinear_register.connect(inputspec, 'input_brain', - brain_warp, 'in_file') + nonlinear_register.connect(inputspec, "input_brain", brain_warp, "in_file") - nonlinear_register.connect(nonlinear_reg, 'field_file', - brain_warp, 'field_file') + nonlinear_register.connect(nonlinear_reg, "field_file", brain_warp, "field_file") - nonlinear_register.connect(inputspec, 'reference_skull', - brain_warp, 'ref_file') + nonlinear_register.connect(inputspec, "reference_skull", brain_warp, "ref_file") - head_warp = pe.Node(interface=fsl.ApplyWarp(), - name='head_warp') - head_warp.inputs.interp = 'spline' + head_warp = pe.Node(interface=fsl.ApplyWarp(), name="head_warp") + head_warp.inputs.interp = "spline" head_warp.inputs.relwarp = True - nonlinear_register.connect(inputspec, 'input_brain', - head_warp, 'in_file') + nonlinear_register.connect(inputspec, "input_brain", head_warp, "in_file") - nonlinear_register.connect(nonlinear_reg, 'field_file', - 
head_warp, 'field_file') + nonlinear_register.connect(nonlinear_reg, "field_file", head_warp, "field_file") - nonlinear_register.connect(inputspec, 'reference_skull', - head_warp, 'ref_file') + nonlinear_register.connect(inputspec, "reference_skull", head_warp, "ref_file") - mask_warp = pe.Node(interface=fsl.ApplyWarp(), - name='mask_warp') - mask_warp.inputs.interp = 'nn' + mask_warp = pe.Node(interface=fsl.ApplyWarp(), name="mask_warp") + mask_warp.inputs.interp = "nn" mask_warp.inputs.relwarp = True - nonlinear_register.connect(inputspec, 'input_brain', - mask_warp, 'in_file') + nonlinear_register.connect(inputspec, "input_brain", mask_warp, "in_file") - nonlinear_register.connect(nonlinear_reg, 'field_file', - mask_warp, 'field_file') + nonlinear_register.connect(nonlinear_reg, "field_file", mask_warp, "field_file") - nonlinear_register.connect(inputspec, 'reference_skull', - mask_warp, 'ref_file') + nonlinear_register.connect(inputspec, "reference_skull", mask_warp, "ref_file") - biasfield_warp = pe.Node(interface=fsl.ApplyWarp(), - name='biasfield_warp') - biasfield_warp.inputs.interp = 'spline' + biasfield_warp = pe.Node(interface=fsl.ApplyWarp(), name="biasfield_warp") + biasfield_warp.inputs.interp = "spline" biasfield_warp.inputs.relwarp = True - nonlinear_register.connect(inputspec, 'input_brain', - biasfield_warp, 'in_file') + nonlinear_register.connect(inputspec, "input_brain", biasfield_warp, "in_file") - nonlinear_register.connect(nonlinear_reg, 'field_file', - biasfield_warp, 'field_file') + nonlinear_register.connect( + nonlinear_reg, "field_file", biasfield_warp, "field_file" + ) - nonlinear_register.connect(inputspec, 'reference_skull', - biasfield_warp, 'ref_file') + nonlinear_register.connect(inputspec, "reference_skull", biasfield_warp, "ref_file") - nonlinear_register.connect(nonlinear_reg, 'fieldcoeff_file', - outputspec, 'nonlinear_xfm') + nonlinear_register.connect( + nonlinear_reg, "fieldcoeff_file", outputspec, "nonlinear_xfm" + ) - nonlinear_register.connect(nonlinear_reg, 'field_file', - outputspec, 'nonlinear_warp') + nonlinear_register.connect( + nonlinear_reg, "field_file", outputspec, "nonlinear_warp" + ) - nonlinear_register.connect(brain_warp, 'out_file', - outputspec, 'output_brain') + nonlinear_register.connect(brain_warp, "out_file", outputspec, "output_brain") - nonlinear_register.connect(head_warp, 'out_file', - outputspec, 'output_head') + nonlinear_register.connect(head_warp, "out_file", outputspec, "output_head") - nonlinear_register.connect(mask_warp, 'out_file', - outputspec, 'output_mask') + nonlinear_register.connect(mask_warp, "out_file", outputspec, "output_mask") - nonlinear_register.connect(biasfield_warp, 'out_file', - outputspec, 'output_biasfield') + nonlinear_register.connect( + biasfield_warp, "out_file", outputspec, "output_biasfield" + ) return nonlinear_register -def create_register_func_to_anat(config, phase_diff_distcor=False, - name='register_func_to_anat'): - +def create_register_func_to_anat( + config, phase_diff_distcor=False, name="register_func_to_anat" +): """ Registers a functional scan in native space to anatomical space using a linear transform and does not include bbregister. 
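The NHP variant above fans a single FNIRT field out to four ApplyWarp nodes, varying only the interpolator (nearest-neighbour for the brain and mask, spline for the head and bias field) and keeping relwarp on throughout. A loop like the following sketch, with placeholder node names and paths, captures that pattern::

    from nipype.interfaces import fsl
    from nipype.pipeline import engine as pe

    wf = pe.Workflow(name="apply_fnirt_field")

    # One warp field, several targets; only the interpolator differs.
    interp_per_output = {
        "brain_warp": "nn",         # label-like images: nearest-neighbour
        "head_warp": "spline",      # continuous images: spline
        "mask_warp": "nn",
        "biasfield_warp": "spline",
    }
    for node_name, interp in interp_per_output.items():
        node = pe.Node(fsl.ApplyWarp(), name=node_name)
        node.inputs.interp = interp
        node.inputs.relwarp = True                       # field is relative
        node.inputs.in_file = f"{node_name}_in.nii.gz"   # placeholder path
        node.inputs.ref_file = "reference_skull.nii.gz"  # placeholder path
        node.inputs.field_file = "fnirt_field.nii.gz"    # placeholder path
        wf.add_nodes([node])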
@@ -668,7 +700,6 @@ def create_register_func_to_anat(config, phase_diff_distcor=False, Notes ----- - Workflow Inputs:: inputspec.func : string (nifti file) @@ -688,77 +719,93 @@ def create_register_func_to_anat(config, phase_diff_distcor=False, """ register_func_to_anat = pe.Workflow(name=name) - inputspec = pe.Node(util.IdentityInterface(fields=['func', - 'anat', - 'dof', - 'interp', - 'fieldmap', - 'fieldmapmask']), - name='inputspec') + inputspec = pe.Node( + util.IdentityInterface( + fields=["func", "anat", "dof", "interp", "fieldmap", "fieldmapmask"] + ), + name="inputspec", + ) inputNode_echospacing = pe.Node( - util.IdentityInterface(fields=['echospacing']), - name='echospacing_input') - - inputNode_pedir = pe.Node(util.IdentityInterface(fields=['pedir']), - name='pedir_input') + util.IdentityInterface(fields=["echospacing"]), name="echospacing_input" + ) - outputspec = pe.Node(util.IdentityInterface( - fields=['func_to_anat_linear_xfm_nobbreg', 'anat_func_nobbreg']), - name='outputspec') + inputNode_pedir = pe.Node( + util.IdentityInterface(fields=["pedir"]), name="pedir_input" + ) - linear_reg = pe.Node(interface=fsl.FLIRT(), - name='linear_func_to_anat') + outputspec = pe.Node( + util.IdentityInterface( + fields=["func_to_anat_linear_xfm_nobbreg", "anat_func_nobbreg"] + ), + name="outputspec", + ) - linear_reg.inputs.interp = config.registration_workflows['functional_registration']['coregistration']['interpolation'] - linear_reg.inputs.cost = config.registration_workflows['functional_registration']['coregistration']['cost'] - linear_reg.inputs.dof = config.registration_workflows['functional_registration']['coregistration']['dof'] - if config.registration_workflows['functional_registration']['coregistration']['arguments'] is not None: - linear_reg.inputs.args = config.registration_workflows['functional_registration']['coregistration']['arguments'] + linear_reg = pe.Node(interface=fsl.FLIRT(), name="linear_func_to_anat") + + linear_reg.inputs.interp = config.registration_workflows["functional_registration"][ + "coregistration" + ]["interpolation"] + linear_reg.inputs.cost = config.registration_workflows["functional_registration"][ + "coregistration" + ]["cost"] + linear_reg.inputs.dof = config.registration_workflows["functional_registration"][ + "coregistration" + ]["dof"] + if ( + config.registration_workflows["functional_registration"]["coregistration"][ + "arguments" + ] + is not None + ): + linear_reg.inputs.args = config.registration_workflows[ + "functional_registration" + ]["coregistration"]["arguments"] if phase_diff_distcor: - conv_pedir = \ - pe.Node(interface=util.Function(input_names=['pedir', - 'convert'], - output_names=['pedir'], - function=convert_pedir), - name='coreg_convert_pedir') - conv_pedir.inputs.convert = 'xyz_to_int' + conv_pedir = pe.Node( + interface=util.Function( + input_names=["pedir", "convert"], + output_names=["pedir"], + function=convert_pedir, + ), + name="coreg_convert_pedir", + ) + conv_pedir.inputs.convert = "xyz_to_int" - register_func_to_anat.connect(inputNode_pedir, 'pedir', - conv_pedir, 'pedir') - register_func_to_anat.connect(conv_pedir, 'pedir', - linear_reg, 'pedir') - register_func_to_anat.connect(inputspec, 'fieldmap', - linear_reg, 'fieldmap') - register_func_to_anat.connect(inputspec, 'fieldmapmask', - linear_reg, 'fieldmapmask') - register_func_to_anat.connect(inputNode_echospacing, 'echospacing', - linear_reg, 'echospacing') + register_func_to_anat.connect(inputNode_pedir, "pedir", conv_pedir, "pedir") + 
register_func_to_anat.connect(conv_pedir, "pedir", linear_reg, "pedir") + register_func_to_anat.connect(inputspec, "fieldmap", linear_reg, "fieldmap") + register_func_to_anat.connect( + inputspec, "fieldmapmask", linear_reg, "fieldmapmask" + ) + register_func_to_anat.connect( + inputNode_echospacing, "echospacing", linear_reg, "echospacing" + ) - register_func_to_anat.connect(inputspec, 'func', linear_reg, 'in_file') + register_func_to_anat.connect(inputspec, "func", linear_reg, "in_file") - register_func_to_anat.connect(inputspec, 'anat', linear_reg, 'reference') + register_func_to_anat.connect(inputspec, "anat", linear_reg, "reference") - register_func_to_anat.connect(inputspec, 'dof', linear_reg, 'dof') + register_func_to_anat.connect(inputspec, "dof", linear_reg, "dof") - register_func_to_anat.connect(inputspec, 'interp', linear_reg, 'interp') + register_func_to_anat.connect(inputspec, "interp", linear_reg, "interp") - register_func_to_anat.connect(linear_reg, 'out_matrix_file', - outputspec, - 'func_to_anat_linear_xfm_nobbreg') + register_func_to_anat.connect( + linear_reg, "out_matrix_file", outputspec, "func_to_anat_linear_xfm_nobbreg" + ) - register_func_to_anat.connect(linear_reg, 'out_file', - outputspec, 'anat_func_nobbreg') + register_func_to_anat.connect( + linear_reg, "out_file", outputspec, "anat_func_nobbreg" + ) return register_func_to_anat -def create_register_func_to_anat_use_T2(config, name='register_func_to_anat_use_T2'): +def create_register_func_to_anat_use_T2(config, name="register_func_to_anat_use_T2"): # for monkey data # ref: https://github.com/DCAN-Labs/dcan-macaque-pipeline/blob/master/fMRIVolume/GenericfMRIVolumeProcessingPipeline.sh#L287-L295 # https://github.com/HechengJin0/dcan-macaque-pipeline/blob/master/fMRIVolume/GenericfMRIVolumeProcessingPipeline.sh#L524-L535 - """ Registers a functional scan in native space to anatomical space using a linear transform and does not include bbregister, use T1 and T2 image. 
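The coregistration block reformatted above drives a single FLIRT call from nested pipeline-config keys (interpolation, cost, dof, optional extra arguments) and, when a phase-difference fieldmap is available, feeds the fieldmap, its mask, the echo spacing, and an integer-coded phase-encoding direction into the same node. A rough standalone sketch; the dict merely mimics the YAML keys read here, the values are illustrative rather than C-PAC defaults, and every path is a placeholder::

    from nipype.interfaces import fsl

    # Stand-in for cfg.registration_workflows["functional_registration"]
    # ["coregistration"]; illustrative values only.
    coreg = {
        "interpolation": "trilinear",
        "cost": "corratio",
        "dof": 6,
        "arguments": None,  # extra CLI flags, applied only when set
    }

    linear_reg = fsl.FLIRT(
        in_file="mean_bold.nii.gz",               # hypothetical functional
        reference="sub-01_desc-brain_T1w.nii.gz", # hypothetical anatomical
        interp=coreg["interpolation"],
        cost=coreg["cost"],
        dof=coreg["dof"],
    )
    if coreg["arguments"] is not None:
        linear_reg.inputs.args = coreg["arguments"]

    # With a phase-difference fieldmap, FLIRT corrects distortion in the same
    # call; pedir must already be an integer axis code, which is what the
    # convert_pedir node's "xyz_to_int" conversion provides.
    # linear_reg.inputs.fieldmap = "fmap_rads.nii.gz"
    # linear_reg.inputs.fieldmapmask = "fmap_mask.nii.gz"
    # linear_reg.inputs.echospacing = 0.00046
    # linear_reg.inputs.pedir = -2   # e.g. "-y" after conversion

    result = linear_reg.run()  # .outputs.out_matrix_file is the xfm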
@@ -776,7 +823,6 @@ def create_register_func_to_anat_use_T2(config, name='register_func_to_anat_use_ Notes ----- - Workflow Inputs:: inputspec.func : string (nifti file) @@ -791,109 +837,130 @@ def create_register_func_to_anat_use_T2(config, name='register_func_to_anat_use_ outputspec.anat_func_nobbreg : string (nifti file) Functional scan registered to anatomical space """ - - register_func_to_anat_use_T2 = pe.Workflow(name=name) - inputspec = pe.Node(util.IdentityInterface(fields=['func', - 'T1_brain', - 'T2_head', - 'T2_brain']), - name='inputspec') + inputspec = pe.Node( + util.IdentityInterface(fields=["func", "T1_brain", "T2_head", "T2_brain"]), + name="inputspec", + ) - outputspec = pe.Node(util.IdentityInterface(fields=['func_to_anat_linear_xfm_nobbreg', - 'func_to_anat_linear_warp_nobbreg', - 'anat_func_nobbreg']), - name='outputspec') + outputspec = pe.Node( + util.IdentityInterface( + fields=[ + "func_to_anat_linear_xfm_nobbreg", + "func_to_anat_linear_warp_nobbreg", + "anat_func_nobbreg", + ] + ), + name="outputspec", + ) # ${FSLDIR}/bin/flirt -interp spline -dof 6 -in ${fMRIFolder}/${ScoutName}_gdc -ref ${T1wFolder}/${T2wRestoreImage} -omat "$fMRIFolder"/Scout2T2w.mat -out ${fMRIFolder}/Scout2T2w.nii.gz -searchrx -30 30 -searchry -30 30 -searchrz -30 30 -cost mutualinfo - linear_reg_func_to_t2 = pe.Node(interface=fsl.FLIRT(), - name='linear_reg_func_to_t2') - linear_reg_func_to_t2.inputs.interp = 'spline' - linear_reg_func_to_t2.inputs.cost = 'mutualinfo' + linear_reg_func_to_t2 = pe.Node(interface=fsl.FLIRT(), name="linear_reg_func_to_t2") + linear_reg_func_to_t2.inputs.interp = "spline" + linear_reg_func_to_t2.inputs.cost = "mutualinfo" linear_reg_func_to_t2.inputs.dof = 6 linear_reg_func_to_t2.inputs.searchr_x = [30, 30] linear_reg_func_to_t2.inputs.searchr_y = [30, 30] linear_reg_func_to_t2.inputs.searchr_z = [30, 30] - register_func_to_anat_use_T2.connect(inputspec, 'func', linear_reg_func_to_t2, 'in_file') + register_func_to_anat_use_T2.connect( + inputspec, "func", linear_reg_func_to_t2, "in_file" + ) - register_func_to_anat_use_T2.connect(inputspec, 'T2_head', linear_reg_func_to_t2, 'reference') + register_func_to_anat_use_T2.connect( + inputspec, "T2_head", linear_reg_func_to_t2, "reference" + ) # ${FSLDIR}/bin/convert_xfm -omat "$fMRIFolder"/T2w2Scout.mat -inverse "$fMRIFolder"/Scout2T2w.mat - invt = pe.Node(interface=fsl.ConvertXFM(), name='convert_xfm') + invt = pe.Node(interface=fsl.ConvertXFM(), name="convert_xfm") invt.inputs.invert_xfm = True - register_func_to_anat_use_T2.connect(linear_reg_func_to_t2, 'out_matrix_file', invt, 'in_file') + register_func_to_anat_use_T2.connect( + linear_reg_func_to_t2, "out_matrix_file", invt, "in_file" + ) # ${FSLDIR}/bin/applywarp --interp=nn -i ${T1wFolder}/${T2wRestoreImageBrain} -r ${fMRIFolder}/${ScoutName}_gdc --premat="$fMRIFolder"/T2w2Scout.mat -o ${fMRIFolder}/Scout_brain_mask.nii.gz - anat_to_func = pe.Node(interface=fsl.ApplyWarp(), - name='anat_to_func') - anat_to_func.inputs.interp = 'nn' + anat_to_func = pe.Node(interface=fsl.ApplyWarp(), name="anat_to_func") + anat_to_func.inputs.interp = "nn" - register_func_to_anat_use_T2.connect(inputspec, 'T2_brain', anat_to_func, 'in_file') - register_func_to_anat_use_T2.connect(inputspec, 'func', anat_to_func, 'ref_file') - register_func_to_anat_use_T2.connect(invt, 'out_file', anat_to_func, 'premat') + register_func_to_anat_use_T2.connect(inputspec, "T2_brain", anat_to_func, "in_file") + register_func_to_anat_use_T2.connect(inputspec, "func", anat_to_func, "ref_file") + 
register_func_to_anat_use_T2.connect(invt, "out_file", anat_to_func, "premat")

     # ${FSLDIR}/bin/fslmaths ${fMRIFolder}/Scout_brain_mask.nii.gz -bin ${fMRIFolder}/Scout_brain_mask.nii.gz
-    func_brain_mask = pe.Node(interface=fsl.maths.MathsCommand(),
-                              name=f'func_brain_mask')
-    func_brain_mask.inputs.args = '-bin'
+    func_brain_mask = pe.Node(
+        interface=fsl.maths.MathsCommand(), name="func_brain_mask"
+    )
+    func_brain_mask.inputs.args = "-bin"

-    register_func_to_anat_use_T2.connect(anat_to_func, 'out_file', func_brain_mask, 'in_file')
+    register_func_to_anat_use_T2.connect(
+        anat_to_func, "out_file", func_brain_mask, "in_file"
+    )

     # ${FSLDIR}/bin/fslmaths ${fMRIFolder}/${ScoutName}_gdc -mas ${fMRIFolder}/Scout_brain_mask.nii.gz ${fMRIFolder}/Scout_brain_dc.nii.gz
-    func_brain = pe.Node(interface=fsl.MultiImageMaths(),
-                         name='func_brain')
+    func_brain = pe.Node(interface=fsl.MultiImageMaths(), name="func_brain")
     func_brain.inputs.op_string = "-mas %s "

-    register_func_to_anat_use_T2.connect(inputspec, 'func', func_brain, 'in_file')
-    register_func_to_anat_use_T2.connect(func_brain_mask, 'out_file', func_brain, 'operand_files')
+    register_func_to_anat_use_T2.connect(inputspec, "func", func_brain, "in_file")
+    register_func_to_anat_use_T2.connect(
+        func_brain_mask, "out_file", func_brain, "operand_files"
+    )

     # ## re-registering the masked brain to the T1 brain:
     # ${FSLDIR}/bin/flirt -interp spline -dof 6 -in ${fMRIFolder}/Scout_brain_dc.nii.gz -ref ${T1wFolder}/${T1wRestoreImageBrain} -omat "$fMRIFolder"/${ScoutName}_gdc2T1w_init.mat -out ${fMRIFolder}/${ScoutName}_gdc2T1w_brain_init -searchrx -30 30 -searchry -30 30 -searchrz -30 30 -cost mutualinfo
-    linear_reg_func_to_t1 = pe.Node(interface=fsl.FLIRT(),
-                                    name='linear_reg_func_to_t1')
-    linear_reg_func_to_t1.inputs.interp = 'spline'
-    linear_reg_func_to_t1.inputs.cost = 'mutualinfo'
+    linear_reg_func_to_t1 = pe.Node(interface=fsl.FLIRT(), name="linear_reg_func_to_t1")
+    linear_reg_func_to_t1.inputs.interp = "spline"
+    linear_reg_func_to_t1.inputs.cost = "mutualinfo"
     linear_reg_func_to_t1.inputs.dof = 6
     linear_reg_func_to_t1.inputs.searchr_x = [30, 30]
     linear_reg_func_to_t1.inputs.searchr_y = [30, 30]
     linear_reg_func_to_t1.inputs.searchr_z = [30, 30]

-    register_func_to_anat_use_T2.connect(func_brain, 'out_file', linear_reg_func_to_t1, 'in_file')
+    register_func_to_anat_use_T2.connect(
+        func_brain, "out_file", linear_reg_func_to_t1, "in_file"
+    )

-    register_func_to_anat_use_T2.connect(inputspec, 'T1_brain', linear_reg_func_to_t1, 'reference')
+    register_func_to_anat_use_T2.connect(
+        inputspec, "T1_brain", linear_reg_func_to_t1, "reference"
+    )

     # #taking out warpfield as it is not being made without a fieldmap.
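The chain above mirrors the DCAN macaque script quoted in the comments: invert the func-to-T2 affine, pull the T2 brain back into functional space with the matrix alone, binarize it, and mask the scout before the final FLIRT to T1. Run as bare interfaces rather than workflow nodes, and with placeholder filenames, it looks roughly like::

    from nipype.interfaces import fsl

    # Invert the func->T2 affine so the T2 brain can be pulled into func space.
    inv = fsl.ConvertXFM(
        in_file="Scout2T2w.mat", invert_xfm=True, out_file="T2w2Scout.mat"
    ).run()

    # Matrix-only resample (premat, no warp field), nearest-neighbour so the
    # brain edge stays crisp enough to binarize.
    t2_in_func = fsl.ApplyWarp(
        in_file="T2w_brain.nii.gz",
        ref_file="scout.nii.gz",
        premat=inv.outputs.out_file,
        interp="nn",
    ).run()

    # Binarize, then mask the functional scout with the result.
    mask = fsl.maths.MathsCommand(
        in_file=t2_in_func.outputs.out_file, args="-bin"
    ).run()
    fsl.MultiImageMaths(
        in_file="scout.nii.gz",
        op_string="-mas %s",
        operand_files=[mask.outputs.out_file],
    ).run()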
# ${FSLDIR}/bin/convertwarp --relout --rel -r ${T1wFolder}/${T2wRestoreImage} --postmat=${fMRIFolder}/${ScoutName}_gdc2T1w_init.mat -o ${fMRIFolder}/${ScoutName}_gdc2T1w_init_warp - convert_warp = pe.Node(interface=fsl.ConvertWarp(), name='convert_warp') + convert_warp = pe.Node(interface=fsl.ConvertWarp(), name="convert_warp") convert_warp.inputs.out_relwarp = True convert_warp.inputs.relwarp = True - register_func_to_anat_use_T2.connect(linear_reg_func_to_t1, 'out_matrix_file', convert_warp, 'postmat') - - register_func_to_anat_use_T2.connect(inputspec, 'T2_head', convert_warp, 'reference') + register_func_to_anat_use_T2.connect( + linear_reg_func_to_t1, "out_matrix_file", convert_warp, "postmat" + ) + register_func_to_anat_use_T2.connect( + inputspec, "T2_head", convert_warp, "reference" + ) - register_func_to_anat_use_T2.connect(linear_reg_func_to_t1, 'out_matrix_file', - outputspec, - 'func_to_anat_linear_xfm_nobbreg') + register_func_to_anat_use_T2.connect( + linear_reg_func_to_t1, + "out_matrix_file", + outputspec, + "func_to_anat_linear_xfm_nobbreg", + ) - register_func_to_anat_use_T2.connect(convert_warp, 'out_file', - outputspec, - 'func_to_anat_linear_warp_nobbreg') + register_func_to_anat_use_T2.connect( + convert_warp, "out_file", outputspec, "func_to_anat_linear_warp_nobbreg" + ) - register_func_to_anat_use_T2.connect(linear_reg_func_to_t1, 'out_file', - outputspec, 'anat_func_nobbreg') + register_func_to_anat_use_T2.connect( + linear_reg_func_to_t1, "out_file", outputspec, "anat_func_nobbreg" + ) return register_func_to_anat_use_T2 -def create_bbregister_func_to_anat(phase_diff_distcor=False, - name='bbregister_func_to_anat'): - +def create_bbregister_func_to_anat( + phase_diff_distcor=False, name="bbregister_func_to_anat" +): """ Registers a functional scan in native space to structural. 
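The convertwarp call shown above simply repackages the func-to-T1 initialization matrix as a relative warp field in the T2 reference space, so downstream consumers can treat linear and nonlinear transforms uniformly. A hedged sketch with assumed filenames::

    from nipype.interfaces import fsl

    convert_warp = fsl.ConvertWarp(
        reference="T2w_restore.nii.gz",   # -r ${T1wFolder}/${T2wRestoreImage}
        postmat="func2T1w_init.mat",      # --postmat=<func-to-T1 matrix>
        relwarp=True,                     # --rel
        out_relwarp=True,                 # --relout
    )
    res = convert_warp.run()
    # res.outputs.out_file is the "linear warp" the outputspec exposes above.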
This is meant to be used after create_nonlinear_register() has been run and @@ -913,7 +980,6 @@ def create_bbregister_func_to_anat(phase_diff_distcor=False, Notes ----- - Workflow Inputs:: inputspec.func : string (nifti file) @@ -934,105 +1000,115 @@ def create_bbregister_func_to_anat(phase_diff_distcor=False, outputspec.anat_func : string (nifti file) Functional data in anatomical space """ - register_bbregister_func_to_anat = pe.Workflow(name=name) - inputspec = pe.Node(util.IdentityInterface(fields=['func', - 'anat', - 'linear_reg_matrix', - 'anat_wm_segmentation', - 'bbr_schedule', - 'bbr_wm_mask_args', - 'fieldmap', - 'fieldmapmask']), - name='inputspec') + inputspec = pe.Node( + util.IdentityInterface( + fields=[ + "func", + "anat", + "linear_reg_matrix", + "anat_wm_segmentation", + "bbr_schedule", + "bbr_wm_mask_args", + "fieldmap", + "fieldmapmask", + ] + ), + name="inputspec", + ) inputNode_echospacing = pe.Node( - util.IdentityInterface(fields=['echospacing']), - name='echospacing_input') + util.IdentityInterface(fields=["echospacing"]), name="echospacing_input" + ) - inputNode_pedir = pe.Node(util.IdentityInterface(fields=['pedir']), - name='pedir_input') + inputNode_pedir = pe.Node( + util.IdentityInterface(fields=["pedir"]), name="pedir_input" + ) - outputspec = pe.Node(util.IdentityInterface( - fields=['func_to_anat_linear_xfm', 'anat_func']), name='outputspec') + outputspec = pe.Node( + util.IdentityInterface(fields=["func_to_anat_linear_xfm", "anat_func"]), + name="outputspec", + ) - wm_bb_mask = pe.Node(interface=fsl.ImageMaths(), - name='wm_bb_mask') + wm_bb_mask = pe.Node(interface=fsl.ImageMaths(), name="wm_bb_mask") register_bbregister_func_to_anat.connect( - inputspec, 'bbr_wm_mask_args', - wm_bb_mask, 'op_string') + inputspec, "bbr_wm_mask_args", wm_bb_mask, "op_string" + ) - register_bbregister_func_to_anat.connect(inputspec, - 'anat_wm_segmentation', - wm_bb_mask, 'in_file') + register_bbregister_func_to_anat.connect( + inputspec, "anat_wm_segmentation", wm_bb_mask, "in_file" + ) def bbreg_args(bbreg_target): - return '-cost bbr -wmseg ' + bbreg_target + return "-cost bbr -wmseg " + bbreg_target - bbreg_func_to_anat = pe.Node(interface=fsl.FLIRT(), - name='bbreg_func_to_anat') + bbreg_func_to_anat = pe.Node(interface=fsl.FLIRT(), name="bbreg_func_to_anat") bbreg_func_to_anat.inputs.dof = 6 register_bbregister_func_to_anat.connect( - inputspec, 'bbr_schedule', - bbreg_func_to_anat, 'schedule') + inputspec, "bbr_schedule", bbreg_func_to_anat, "schedule" + ) register_bbregister_func_to_anat.connect( - wm_bb_mask, ('out_file', bbreg_args), - bbreg_func_to_anat, 'args') + wm_bb_mask, ("out_file", bbreg_args), bbreg_func_to_anat, "args" + ) register_bbregister_func_to_anat.connect( - inputspec, 'func', - bbreg_func_to_anat, 'in_file') + inputspec, "func", bbreg_func_to_anat, "in_file" + ) register_bbregister_func_to_anat.connect( - inputspec, 'anat', - bbreg_func_to_anat, 'reference') + inputspec, "anat", bbreg_func_to_anat, "reference" + ) register_bbregister_func_to_anat.connect( - inputspec, 'linear_reg_matrix', - bbreg_func_to_anat, 'in_matrix_file') + inputspec, "linear_reg_matrix", bbreg_func_to_anat, "in_matrix_file" + ) if phase_diff_distcor: - conv_pedir = \ - pe.Node(interface=util.Function(input_names=['pedir', - 'convert'], - output_names=['pedir'], - function=convert_pedir), - name='bbreg_convert_pedir') - conv_pedir.inputs.convert = 'xyz_to_int' - - register_bbregister_func_to_anat.connect(inputNode_pedir, 'pedir', - conv_pedir, 'pedir') - 
register_bbregister_func_to_anat.connect(conv_pedir, 'pedir', - bbreg_func_to_anat, 'pedir') + conv_pedir = pe.Node( + interface=util.Function( + input_names=["pedir", "convert"], + output_names=["pedir"], + function=convert_pedir, + ), + name="bbreg_convert_pedir", + ) + conv_pedir.inputs.convert = "xyz_to_int" + + register_bbregister_func_to_anat.connect( + inputNode_pedir, "pedir", conv_pedir, "pedir" + ) register_bbregister_func_to_anat.connect( - inputspec, 'fieldmap', - bbreg_func_to_anat, 'fieldmap') + conv_pedir, "pedir", bbreg_func_to_anat, "pedir" + ) register_bbregister_func_to_anat.connect( - inputspec, 'fieldmapmask', - bbreg_func_to_anat, 'fieldmapmask') + inputspec, "fieldmap", bbreg_func_to_anat, "fieldmap" + ) register_bbregister_func_to_anat.connect( - inputNode_echospacing, 'echospacing', - bbreg_func_to_anat, 'echospacing') + inputspec, "fieldmapmask", bbreg_func_to_anat, "fieldmapmask" + ) + register_bbregister_func_to_anat.connect( + inputNode_echospacing, "echospacing", bbreg_func_to_anat, "echospacing" + ) register_bbregister_func_to_anat.connect( - bbreg_func_to_anat, 'out_matrix_file', - outputspec, 'func_to_anat_linear_xfm') + bbreg_func_to_anat, "out_matrix_file", outputspec, "func_to_anat_linear_xfm" + ) register_bbregister_func_to_anat.connect( - bbreg_func_to_anat, 'out_file', - outputspec, 'anat_func') + bbreg_func_to_anat, "out_file", outputspec, "anat_func" + ) return register_bbregister_func_to_anat def create_wf_calculate_ants_warp( - name='create_wf_calculate_ants_warp', num_threads=1, reg_ants_skull=1 + name="create_wf_calculate_ants_warp", num_threads=1, reg_ants_skull=1 ): - ''' + """ Calculates the nonlinear ANTS registration transform. This workflow employs the antsRegistration tool: @@ -1050,7 +1126,6 @@ def create_wf_calculate_ants_warp( Notes ----- - Some of the inputs listed below are lists or lists of lists. This is because antsRegistration can perform multiple stages of calculations depending on how the user configures their registration. @@ -1157,798 +1232,986 @@ def create_wf_calculate_ants_warp( .. 
image:: :width: 500 - ''' - + """ calc_ants_warp_wf = pe.Workflow(name=name) - inputspec = pe.Node(util.IdentityInterface( - fields=['moving_brain', - 'reference_brain', - 'moving_skull', - 'reference_skull', - 'reference_mask', - 'moving_mask', - 'fixed_image_mask', - 'ants_para', - 'interp']), - name='inputspec') - - outputspec = pe.Node(util.IdentityInterface( - fields=['ants_initial_xfm', - 'ants_rigid_xfm', - 'ants_affine_xfm', - 'warp_field', - 'inverse_warp_field', - 'composite_transform', - 'wait', - 'normalized_output_brain']), name='outputspec') + inputspec = pe.Node( + util.IdentityInterface( + fields=[ + "moving_brain", + "reference_brain", + "moving_skull", + "reference_skull", + "reference_mask", + "moving_mask", + "fixed_image_mask", + "ants_para", + "interp", + ] + ), + name="inputspec", + ) + + outputspec = pe.Node( + util.IdentityInterface( + fields=[ + "ants_initial_xfm", + "ants_rigid_xfm", + "ants_affine_xfm", + "warp_field", + "inverse_warp_field", + "composite_transform", + "wait", + "normalized_output_brain", + ] + ), + name="outputspec", + ) # use ANTS to warp the masked anatomical image to a template image - ''' + """ calculate_ants_warp = pe.Node(interface=ants.Registration(), name='calculate_ants_warp') calculate_ants_warp.inputs.output_warped_image = True calculate_ants_warp.inputs.initial_moving_transform_com = 0 - ''' - reg_imports = ['import os', 'import subprocess'] - calculate_ants_warp = \ - pe.Node(interface=util.Function(input_names=['moving_brain', - 'reference_brain', - 'moving_skull', - 'reference_skull', - 'ants_para', - 'moving_mask', - 'reference_mask', - 'fixed_image_mask', - 'interp', - 'reg_with_skull'], - output_names=['warp_list', - 'warped_image'], - function=hardcoded_reg, - imports=reg_imports), - name='calc_ants_warp', - mem_gb=2.8, - mem_x=(2e-7, 'moving_brain', 'xyz'), - throttle=True) + """ + reg_imports = ["import os", "import subprocess"] + calculate_ants_warp = pe.Node( + interface=util.Function( + input_names=[ + "moving_brain", + "reference_brain", + "moving_skull", + "reference_skull", + "ants_para", + "moving_mask", + "reference_mask", + "fixed_image_mask", + "interp", + "reg_with_skull", + ], + output_names=["warp_list", "warped_image"], + function=hardcoded_reg, + imports=reg_imports, + ), + name="calc_ants_warp", + mem_gb=2.8, + mem_x=(2e-7, "moving_brain", "xyz"), + throttle=True, + ) calculate_ants_warp.interface.num_threads = num_threads - select_forward_initial = pe.Node(util.Function( - input_names=['warp_list', 'selection'], - output_names=['selected_warp'], - function=seperate_warps_list), name='select_forward_initial') + select_forward_initial = pe.Node( + util.Function( + input_names=["warp_list", "selection"], + output_names=["selected_warp"], + function=seperate_warps_list, + ), + name="select_forward_initial", + ) select_forward_initial.inputs.selection = "Initial" - select_forward_rigid = pe.Node(util.Function( - input_names=['warp_list', 'selection'], - output_names=['selected_warp'], - function=seperate_warps_list), name='select_forward_rigid') + select_forward_rigid = pe.Node( + util.Function( + input_names=["warp_list", "selection"], + output_names=["selected_warp"], + function=seperate_warps_list, + ), + name="select_forward_rigid", + ) select_forward_rigid.inputs.selection = "Rigid" - select_forward_affine = pe.Node(util.Function( - input_names=['warp_list', 'selection'], - output_names=['selected_warp'], - function=seperate_warps_list), name='select_forward_affine') + select_forward_affine = pe.Node( + 
util.Function( + input_names=["warp_list", "selection"], + output_names=["selected_warp"], + function=seperate_warps_list, + ), + name="select_forward_affine", + ) select_forward_affine.inputs.selection = "Affine" - select_forward_warp = pe.Node(util.Function( - input_names=['warp_list', 'selection'], - output_names=['selected_warp'], - function=seperate_warps_list), name='select_forward_warp') + select_forward_warp = pe.Node( + util.Function( + input_names=["warp_list", "selection"], + output_names=["selected_warp"], + function=seperate_warps_list, + ), + name="select_forward_warp", + ) select_forward_warp.inputs.selection = "Warp" - select_inverse_warp = pe.Node(util.Function( - input_names=['warp_list', 'selection'], - output_names=['selected_warp'], - function=seperate_warps_list), name='select_inverse_warp') + select_inverse_warp = pe.Node( + util.Function( + input_names=["warp_list", "selection"], + output_names=["selected_warp"], + function=seperate_warps_list, + ), + name="select_inverse_warp", + ) select_inverse_warp.inputs.selection = "Inverse" calc_ants_warp_wf.connect( - inputspec, 'moving_brain', - calculate_ants_warp, 'moving_brain') + inputspec, "moving_brain", calculate_ants_warp, "moving_brain" + ) calc_ants_warp_wf.connect( - inputspec, 'reference_brain', - calculate_ants_warp, 'reference_brain') + inputspec, "reference_brain", calculate_ants_warp, "reference_brain" + ) if reg_ants_skull == 1: - calculate_ants_warp.inputs.reg_with_skull = 1 calc_ants_warp_wf.connect( - inputspec, 'moving_skull', - calculate_ants_warp, 'moving_skull') + inputspec, "moving_skull", calculate_ants_warp, "moving_skull" + ) calc_ants_warp_wf.connect( - inputspec, 'reference_skull', - calculate_ants_warp, 'reference_skull') + inputspec, "reference_skull", calculate_ants_warp, "reference_skull" + ) else: calc_ants_warp_wf.connect( - inputspec, 'moving_brain', - calculate_ants_warp, 'moving_skull') + inputspec, "moving_brain", calculate_ants_warp, "moving_skull" + ) calc_ants_warp_wf.connect( - inputspec, 'reference_brain', - calculate_ants_warp, 'reference_skull') + inputspec, "reference_brain", calculate_ants_warp, "reference_skull" + ) calc_ants_warp_wf.connect( - inputspec, 'fixed_image_mask', - calculate_ants_warp, 'fixed_image_mask') + inputspec, "fixed_image_mask", calculate_ants_warp, "fixed_image_mask" + ) - calc_ants_warp_wf.connect(inputspec, 'reference_mask', - calculate_ants_warp, 'reference_mask') + calc_ants_warp_wf.connect( + inputspec, "reference_mask", calculate_ants_warp, "reference_mask" + ) - calc_ants_warp_wf.connect(inputspec, 'moving_mask', - calculate_ants_warp, 'moving_mask') + calc_ants_warp_wf.connect( + inputspec, "moving_mask", calculate_ants_warp, "moving_mask" + ) - calc_ants_warp_wf.connect(inputspec, 'ants_para', - calculate_ants_warp, 'ants_para') + calc_ants_warp_wf.connect(inputspec, "ants_para", calculate_ants_warp, "ants_para") - calc_ants_warp_wf.connect( - inputspec, 'interp', - calculate_ants_warp, 'interp') + calc_ants_warp_wf.connect(inputspec, "interp", calculate_ants_warp, "interp") # inter-workflow connections calc_ants_warp_wf.connect( - calculate_ants_warp, 'warp_list', - select_forward_initial, 'warp_list') + calculate_ants_warp, "warp_list", select_forward_initial, "warp_list" + ) calc_ants_warp_wf.connect( - calculate_ants_warp, 'warp_list', - select_forward_rigid, 'warp_list') + calculate_ants_warp, "warp_list", select_forward_rigid, "warp_list" + ) calc_ants_warp_wf.connect( - calculate_ants_warp, 'warp_list', - select_forward_affine, 
'warp_list') + calculate_ants_warp, "warp_list", select_forward_affine, "warp_list" + ) calc_ants_warp_wf.connect( - calculate_ants_warp, 'warp_list', - select_forward_warp, 'warp_list') + calculate_ants_warp, "warp_list", select_forward_warp, "warp_list" + ) calc_ants_warp_wf.connect( - calculate_ants_warp, 'warp_list', - select_inverse_warp, 'warp_list') + calculate_ants_warp, "warp_list", select_inverse_warp, "warp_list" + ) # connections to outputspec calc_ants_warp_wf.connect( - select_forward_initial, 'selected_warp', - outputspec, 'ants_initial_xfm') + select_forward_initial, "selected_warp", outputspec, "ants_initial_xfm" + ) calc_ants_warp_wf.connect( - select_forward_rigid, 'selected_warp', - outputspec, 'ants_rigid_xfm') + select_forward_rigid, "selected_warp", outputspec, "ants_rigid_xfm" + ) calc_ants_warp_wf.connect( - select_forward_affine, 'selected_warp', - outputspec, 'ants_affine_xfm') + select_forward_affine, "selected_warp", outputspec, "ants_affine_xfm" + ) calc_ants_warp_wf.connect( - select_forward_warp, 'selected_warp', - outputspec, 'warp_field') + select_forward_warp, "selected_warp", outputspec, "warp_field" + ) calc_ants_warp_wf.connect( - select_inverse_warp, 'selected_warp', - outputspec, 'inverse_warp_field') + select_inverse_warp, "selected_warp", outputspec, "inverse_warp_field" + ) calc_ants_warp_wf.connect( - calculate_ants_warp, 'warped_image', - outputspec, 'normalized_output_brain') + calculate_ants_warp, "warped_image", outputspec, "normalized_output_brain" + ) return calc_ants_warp_wf -def FSL_registration_connector(wf_name, cfg, orig="T1w", opt=None, - symmetric=False, template="T1w"): - +def FSL_registration_connector( + wf_name, cfg, orig="T1w", opt=None, symmetric=False, template="T1w" +): wf = pe.Workflow(name=wf_name) inputNode = pe.Node( - util.IdentityInterface(fields=['input_brain', - 'reference_brain', - 'input_head', - 'reference_head', - 'input_mask', - 'reference_mask', - 'transform', - 'interpolation', - 'fnirt_config']), - name='inputspec') - - sym = '' - symm = '' - if symmetric: - sym = 'sym' - symm = '_symmetric' + util.IdentityInterface( + fields=[ + "input_brain", + "reference_brain", + "input_head", + "reference_head", + "input_mask", + "reference_mask", + "transform", + "interpolation", + "fnirt_config", + ] + ), + name="inputspec", + ) - tmpl = '' - if template == 'EPI': - tmpl = 'EPI' + sym = "" + symm = "" + if symmetric: + sym = "sym" + symm = "_symmetric" - if opt == 'FSL' or opt == 'FSL-linear': + tmpl = "" + if template == "EPI": + tmpl = "EPI" + if opt == "FSL" or opt == "FSL-linear": flirt_reg_anat_mni = create_fsl_flirt_linear_reg( - f'anat_mni_flirt_register{symm}' + f"anat_mni_flirt_register{symm}" ) # Input registration parameters - wf.connect(inputNode, 'interpolation', - flirt_reg_anat_mni, 'inputspec.interp') + wf.connect(inputNode, "interpolation", flirt_reg_anat_mni, "inputspec.interp") - wf.connect(inputNode, 'input_brain', - flirt_reg_anat_mni, 'inputspec.input_brain') + wf.connect( + inputNode, "input_brain", flirt_reg_anat_mni, "inputspec.input_brain" + ) - wf.connect(inputNode, 'reference_brain', flirt_reg_anat_mni, - 'inputspec.reference_brain') + wf.connect( + inputNode, + "reference_brain", + flirt_reg_anat_mni, + "inputspec.reference_brain", + ) - write_lin_composite_xfm = pe.Node(interface=fsl.ConvertWarp(), - name=f'fsl_lin-warp_to_nii{symm}') + write_lin_composite_xfm = pe.Node( + interface=fsl.ConvertWarp(), name=f"fsl_lin-warp_to_nii{symm}" + ) - wf.connect(inputNode, 'reference_brain', - 
write_lin_composite_xfm, 'reference') + wf.connect(inputNode, "reference_brain", write_lin_composite_xfm, "reference") - wf.connect(flirt_reg_anat_mni, 'outputspec.linear_xfm', - write_lin_composite_xfm, 'premat') + wf.connect( + flirt_reg_anat_mni, + "outputspec.linear_xfm", + write_lin_composite_xfm, + "premat", + ) - write_invlin_composite_xfm = pe.Node(interface=fsl.ConvertWarp(), - name=f'fsl_invlin-warp_to_' - f'nii{symm}') + write_invlin_composite_xfm = pe.Node( + interface=fsl.ConvertWarp(), name=f"fsl_invlin-warp_to_" f"nii{symm}" + ) - wf.connect(inputNode, 'reference_brain', - write_invlin_composite_xfm, 'reference') + wf.connect( + inputNode, "reference_brain", write_invlin_composite_xfm, "reference" + ) - wf.connect(flirt_reg_anat_mni, 'outputspec.invlinear_xfm', - write_invlin_composite_xfm, 'premat') + wf.connect( + flirt_reg_anat_mni, + "outputspec.invlinear_xfm", + write_invlin_composite_xfm, + "premat", + ) outputs = { - f'space-{sym}template_desc-preproc_{orig}': ( - flirt_reg_anat_mni, 'outputspec.output_brain'), - f'from-{orig}_to-{sym}{tmpl}template_mode-image_desc-linear_xfm': ( - write_lin_composite_xfm, 'out_file'), - f'from-{sym}{tmpl}template_to-{orig}_mode-image_desc-linear_xfm': ( - write_invlin_composite_xfm, 'out_file'), - f'from-{orig}_to-{sym}{tmpl}template_mode-image_xfm': ( - write_lin_composite_xfm, 'out_file') + f"space-{sym}template_desc-preproc_{orig}": ( + flirt_reg_anat_mni, + "outputspec.output_brain", + ), + f"from-{orig}_to-{sym}{tmpl}template_mode-image_desc-linear_xfm": ( + write_lin_composite_xfm, + "out_file", + ), + f"from-{sym}{tmpl}template_to-{orig}_mode-image_desc-linear_xfm": ( + write_invlin_composite_xfm, + "out_file", + ), + f"from-{orig}_to-{sym}{tmpl}template_mode-image_xfm": ( + write_lin_composite_xfm, + "out_file", + ), } - - if opt == 'FSL': + if opt == "FSL": fnirt_reg_anat_mni = create_fsl_fnirt_nonlinear_reg_nhp( - f'anat_mni_fnirt_register{symm}' + f"anat_mni_fnirt_register{symm}" ) - wf.connect(inputNode, 'input_brain', - fnirt_reg_anat_mni, 'inputspec.input_brain') + wf.connect( + inputNode, "input_brain", fnirt_reg_anat_mni, "inputspec.input_brain" + ) - wf.connect(inputNode, 'reference_brain', - fnirt_reg_anat_mni, 'inputspec.reference_brain') + wf.connect( + inputNode, + "reference_brain", + fnirt_reg_anat_mni, + "inputspec.reference_brain", + ) - wf.connect(inputNode, 'input_head', - fnirt_reg_anat_mni, 'inputspec.input_skull') + wf.connect(inputNode, "input_head", fnirt_reg_anat_mni, "inputspec.input_skull") # NOTE: crossover from above opt block - wf.connect(flirt_reg_anat_mni, 'outputspec.linear_xfm', - fnirt_reg_anat_mni, 'inputspec.linear_aff') + wf.connect( + flirt_reg_anat_mni, + "outputspec.linear_xfm", + fnirt_reg_anat_mni, + "inputspec.linear_aff", + ) - wf.connect(inputNode, 'reference_head', - fnirt_reg_anat_mni, 'inputspec.reference_skull') + wf.connect( + inputNode, "reference_head", fnirt_reg_anat_mni, "inputspec.reference_skull" + ) - wf.connect(inputNode, 'reference_mask', - fnirt_reg_anat_mni, 'inputspec.ref_mask') + wf.connect( + inputNode, "reference_mask", fnirt_reg_anat_mni, "inputspec.ref_mask" + ) # assign the FSL FNIRT config file specified in pipeline config.yml - wf.connect(inputNode, 'fnirt_config', - fnirt_reg_anat_mni, 'inputspec.fnirt_config') + wf.connect( + inputNode, "fnirt_config", fnirt_reg_anat_mni, "inputspec.fnirt_config" + ) # NOTE: this is an UPDATE because of the opt block above added_outputs = { - f'space-{sym}template_desc-preproc_{orig}': ( - fnirt_reg_anat_mni, 
'outputspec.output_brain'), - f'space-{sym}template_desc-head_{orig}': ( - fnirt_reg_anat_mni, 'outputspec.output_head'), - f'space-{sym}template_desc-{orig}_mask': ( - fnirt_reg_anat_mni, 'outputspec.output_mask'), - f'space-{sym}template_desc-T1wT2w_biasfield': ( - fnirt_reg_anat_mni, 'outputspec.output_biasfield'), - f'from-{orig}_to-{sym}{tmpl}template_mode-image_xfm': ( - fnirt_reg_anat_mni, 'outputspec.nonlinear_xfm'), - f'from-{orig}_to-{sym}{tmpl}template_mode-image_warp': ( - fnirt_reg_anat_mni, 'outputspec.nonlinear_warp') + f"space-{sym}template_desc-preproc_{orig}": ( + fnirt_reg_anat_mni, + "outputspec.output_brain", + ), + f"space-{sym}template_desc-head_{orig}": ( + fnirt_reg_anat_mni, + "outputspec.output_head", + ), + f"space-{sym}template_desc-{orig}_mask": ( + fnirt_reg_anat_mni, + "outputspec.output_mask", + ), + f"space-{sym}template_desc-T1wT2w_biasfield": ( + fnirt_reg_anat_mni, + "outputspec.output_biasfield", + ), + f"from-{orig}_to-{sym}{tmpl}template_mode-image_xfm": ( + fnirt_reg_anat_mni, + "outputspec.nonlinear_xfm", + ), + f"from-{orig}_to-{sym}{tmpl}template_mode-image_warp": ( + fnirt_reg_anat_mni, + "outputspec.nonlinear_warp", + ), } outputs.update(added_outputs) return (wf, outputs) -def ANTs_registration_connector(wf_name, cfg, params, orig="T1w", - symmetric=False, template="T1w"): - +def ANTs_registration_connector( + wf_name, cfg, params, orig="T1w", symmetric=False, template="T1w" +): wf = pe.Workflow(name=wf_name) inputNode = pe.Node( - util.IdentityInterface(fields=['input_brain', - 'reference_brain', - 'input_head', - 'reference_head', - 'input_mask', - 'reference_mask', - 'transform', - 'interpolation']), - name='inputspec') - - sym = '' - symm = '' + util.IdentityInterface( + fields=[ + "input_brain", + "reference_brain", + "input_head", + "reference_head", + "input_mask", + "reference_mask", + "transform", + "interpolation", + ] + ), + name="inputspec", + ) + + sym = "" + symm = "" if symmetric: - sym = 'sym' - symm = '_symmetric' + sym = "sym" + symm = "_symmetric" - tmpl = '' - if template == 'EPI': - tmpl = 'EPI' + tmpl = "" + if template == "EPI": + tmpl = "EPI" if params is None: - err_msg = '\n\n[!] C-PAC says: \nYou have selected ANTs as your ' \ - 'anatomical registration method.\n' \ - 'However, no ANTs parameters were specified.\n' \ - 'Please specify ANTs parameters properly and try again.' + err_msg = ( + "\n\n[!] C-PAC says: \nYou have selected ANTs as your " + "anatomical registration method.\n" + "However, no ANTs parameters were specified.\n" + "Please specify ANTs parameters properly and try again." + ) raise Exception(err_msg) - ants_reg_anat_mni = \ - create_wf_calculate_ants_warp( - f'anat_mni_ants_register{symm}', - num_threads=cfg.pipeline_setup['system_config'][ - 'num_ants_threads'], - reg_ants_skull=cfg['registration_workflows'][ - 'anatomical_registration']['reg_with_skull'] - ) + ants_reg_anat_mni = create_wf_calculate_ants_warp( + f"anat_mni_ants_register{symm}", + num_threads=cfg.pipeline_setup["system_config"]["num_ants_threads"], + reg_ants_skull=cfg["registration_workflows"]["anatomical_registration"][ + "reg_with_skull" + ], + ) ants_reg_anat_mni.inputs.inputspec.ants_para = params - wf.connect(inputNode, 'interpolation', - ants_reg_anat_mni, 'inputspec.interp') + wf.connect(inputNode, "interpolation", ants_reg_anat_mni, "inputspec.interp") # calculating the transform with the skullstripped is # reported to be better, but it requires very high # quality skullstripping. 
If skullstripping is imprecise # registration with skull is preferred - wf.connect(inputNode, 'input_brain', - ants_reg_anat_mni, 'inputspec.moving_brain') + wf.connect(inputNode, "input_brain", ants_reg_anat_mni, "inputspec.moving_brain") - wf.connect(inputNode, 'reference_brain', - ants_reg_anat_mni, 'inputspec.reference_brain') + wf.connect( + inputNode, "reference_brain", ants_reg_anat_mni, "inputspec.reference_brain" + ) - wf.connect(inputNode, 'input_head', - ants_reg_anat_mni, 'inputspec.moving_skull') + wf.connect(inputNode, "input_head", ants_reg_anat_mni, "inputspec.moving_skull") - wf.connect(inputNode, 'reference_head', - ants_reg_anat_mni, 'inputspec.reference_skull') + wf.connect( + inputNode, "reference_head", ants_reg_anat_mni, "inputspec.reference_skull" + ) - wf.connect(inputNode, 'input_mask', - ants_reg_anat_mni, 'inputspec.moving_mask') + wf.connect(inputNode, "input_mask", ants_reg_anat_mni, "inputspec.moving_mask") - wf.connect(inputNode, 'reference_mask', - ants_reg_anat_mni, 'inputspec.reference_mask') + wf.connect( + inputNode, "reference_mask", ants_reg_anat_mni, "inputspec.reference_mask" + ) ants_reg_anat_mni.inputs.inputspec.fixed_image_mask = None - if orig == 'T1w': - if cfg.registration_workflows['anatomical_registration'][ - 'registration']['ANTs']['use_lesion_mask']: + if orig == "T1w": + if cfg.registration_workflows["anatomical_registration"]["registration"][ + "ANTs" + ]["use_lesion_mask"]: # Create lesion preproc node to apply afni Refit and Resample - lesion_preproc = create_lesion_preproc( - wf_name=f'lesion_preproc{symm}' + lesion_preproc = create_lesion_preproc(wf_name=f"lesion_preproc{symm}") + wf.connect(inputNode, "lesion_mask", lesion_preproc, "inputspec.lesion") + wf.connect( + lesion_preproc, + "outputspec.reorient", + ants_reg_anat_mni, + "inputspec.fixed_image_mask", ) - wf.connect(inputNode, 'lesion_mask', - lesion_preproc, 'inputspec.lesion') - wf.connect(lesion_preproc, 'outputspec.reorient', - ants_reg_anat_mni, 'inputspec.fixed_image_mask') # combine the linear xfm's into one - makes it easier downstream write_composite_linear_xfm = pe.Node( interface=ants.ApplyTransforms(), - name=f'write_composite_linear{symm}_xfm', + name=f"write_composite_linear{symm}_xfm", mem_gb=1.155, - mem_x=(1708448960473801 / 1208925819614629174706176, 'input_image')) + mem_x=(1708448960473801 / 1208925819614629174706176, "input_image"), + ) write_composite_linear_xfm.inputs.print_out_composite_warp_file = True - write_composite_linear_xfm.inputs.output_image = \ + write_composite_linear_xfm.inputs.output_image = ( f"from-{orig}_to-{sym}{tmpl}template_mode-image_desc-linear_xfm.nii.gz" + ) - wf.connect(inputNode, 'input_brain', - write_composite_linear_xfm, 'input_image') + wf.connect(inputNode, "input_brain", write_composite_linear_xfm, "input_image") - wf.connect(inputNode, 'reference_brain', - write_composite_linear_xfm, 'reference_image') + wf.connect( + inputNode, "reference_brain", write_composite_linear_xfm, "reference_image" + ) - wf.connect(inputNode, 'interpolation', - write_composite_linear_xfm, 'interpolation') + wf.connect(inputNode, "interpolation", write_composite_linear_xfm, "interpolation") write_composite_linear_xfm.inputs.input_image_type = 0 write_composite_linear_xfm.inputs.dimension = 3 - collect_transforms = pe.Node(util.Merge(3), - name=f'collect_transforms{symm}', - mem_gb=0.8, - mem_x=(263474863123069 / - 37778931862957161709568, - 'in1')) + collect_transforms = pe.Node( + util.Merge(3), + name=f"collect_transforms{symm}", + 
mem_gb=0.8, + mem_x=(263474863123069 / 37778931862957161709568, "in1"), + ) - wf.connect(ants_reg_anat_mni, 'outputspec.ants_affine_xfm', - collect_transforms, 'in1') + wf.connect( + ants_reg_anat_mni, "outputspec.ants_affine_xfm", collect_transforms, "in1" + ) - wf.connect(ants_reg_anat_mni, 'outputspec.ants_rigid_xfm', - collect_transforms, 'in2') + wf.connect( + ants_reg_anat_mni, "outputspec.ants_rigid_xfm", collect_transforms, "in2" + ) - wf.connect(ants_reg_anat_mni, 'outputspec.ants_initial_xfm', - collect_transforms, 'in3') + wf.connect( + ants_reg_anat_mni, "outputspec.ants_initial_xfm", collect_transforms, "in3" + ) # check transform list to exclude Nonetype (missing) init/rig/affine check_transform = pe.Node( - util.Function(input_names=['transform_list'], - output_names=['checked_transform_list', - 'list_length'], - function=check_transforms), - name=f'check_transforms', - mem_gb=6) + util.Function( + input_names=["transform_list"], + output_names=["checked_transform_list", "list_length"], + function=check_transforms, + ), + name="check_transforms", + mem_gb=6, + ) - wf.connect(collect_transforms, 'out', check_transform, 'transform_list') + wf.connect(collect_transforms, "out", check_transform, "transform_list") - wf.connect(check_transform, 'checked_transform_list', - write_composite_linear_xfm, 'transforms') + wf.connect( + check_transform, + "checked_transform_list", + write_composite_linear_xfm, + "transforms", + ) # combine the linear xfm's into one - makes it easier downstream write_composite_invlinear_xfm = pe.Node( interface=ants.ApplyTransforms(), - name=f'write_composite_invlinear{symm}_xfm', + name=f"write_composite_invlinear{symm}_xfm", mem_gb=1.05, - mem_x=(1367826948979337 / 151115727451828646838272, 'input_image')) + mem_x=(1367826948979337 / 151115727451828646838272, "input_image"), + ) write_composite_invlinear_xfm.inputs.print_out_composite_warp_file = True - write_composite_invlinear_xfm.inputs.output_image = \ + write_composite_invlinear_xfm.inputs.output_image = ( f"from-{sym}{tmpl}template_to-{orig}_mode-image_desc-linear_xfm.nii.gz" + ) - wf.connect(inputNode, 'reference_brain', - write_composite_invlinear_xfm, 'input_image') + wf.connect( + inputNode, "reference_brain", write_composite_invlinear_xfm, "input_image" + ) - wf.connect(inputNode, 'input_brain', - write_composite_invlinear_xfm, 'reference_image') + wf.connect( + inputNode, "input_brain", write_composite_invlinear_xfm, "reference_image" + ) - wf.connect(inputNode, 'interpolation', - write_composite_invlinear_xfm, 'interpolation') + wf.connect( + inputNode, "interpolation", write_composite_invlinear_xfm, "interpolation" + ) write_composite_invlinear_xfm.inputs.input_image_type = 0 write_composite_invlinear_xfm.inputs.dimension = 3 - collect_inv_transforms = pe.Node(util.Merge(3), - name='collect_inv_transforms' - f'{symm}') + collect_inv_transforms = pe.Node( + util.Merge(3), name="collect_inv_transforms" f"{symm}" + ) - wf.connect(ants_reg_anat_mni, 'outputspec.ants_initial_xfm', - collect_inv_transforms, 'in1') + wf.connect( + ants_reg_anat_mni, "outputspec.ants_initial_xfm", collect_inv_transforms, "in1" + ) - wf.connect(ants_reg_anat_mni, 'outputspec.ants_rigid_xfm', - collect_inv_transforms, 'in2') + wf.connect( + ants_reg_anat_mni, "outputspec.ants_rigid_xfm", collect_inv_transforms, "in2" + ) - wf.connect(ants_reg_anat_mni, 'outputspec.ants_affine_xfm', - collect_inv_transforms, 'in3') + wf.connect( + ants_reg_anat_mni, "outputspec.ants_affine_xfm", collect_inv_transforms, "in3" + ) # 
check transform list to exclude Nonetype (missing) init/rig/affine check_invlinear_transform = pe.Node( - util.Function(input_names=['transform_list'], - output_names=['checked_transform_list', - 'list_length'], - function=check_transforms), - name=f'check_inv_transforms') + util.Function( + input_names=["transform_list"], + output_names=["checked_transform_list", "list_length"], + function=check_transforms, + ), + name="check_inv_transforms", + ) - wf.connect(collect_inv_transforms, 'out', - check_invlinear_transform, 'transform_list') + wf.connect( + collect_inv_transforms, "out", check_invlinear_transform, "transform_list" + ) - wf.connect(check_invlinear_transform, 'checked_transform_list', - write_composite_invlinear_xfm, 'transforms') + wf.connect( + check_invlinear_transform, + "checked_transform_list", + write_composite_invlinear_xfm, + "transforms", + ) # generate inverse transform flags, which depends on the # number of transforms inverse_transform_flags = pe.Node( - util.Function(input_names=['transform_list'], - output_names=['inverse_transform_flags'], - function=generate_inverse_transform_flags), - name=f'inverse_transform_flags') + util.Function( + input_names=["transform_list"], + output_names=["inverse_transform_flags"], + function=generate_inverse_transform_flags, + ), + name="inverse_transform_flags", + ) - wf.connect(check_invlinear_transform, 'checked_transform_list', - inverse_transform_flags, 'transform_list') + wf.connect( + check_invlinear_transform, + "checked_transform_list", + inverse_transform_flags, + "transform_list", + ) - wf.connect(inverse_transform_flags, 'inverse_transform_flags', - write_composite_invlinear_xfm, 'invert_transform_flags') + wf.connect( + inverse_transform_flags, + "inverse_transform_flags", + write_composite_invlinear_xfm, + "invert_transform_flags", + ) # combine ALL xfm's into one - makes it easier downstream write_composite_xfm = pe.Node( - interface=ants.ApplyTransforms(), - name=f'write_composite_{symm}xfm', - mem_gb=1.5) + interface=ants.ApplyTransforms(), name=f"write_composite_{symm}xfm", mem_gb=1.5 + ) write_composite_xfm.inputs.print_out_composite_warp_file = True - write_composite_xfm.inputs.output_image = \ + write_composite_xfm.inputs.output_image = ( f"from-{orig}_to-{sym}{tmpl}template_mode-image_xfm.nii.gz" + ) - wf.connect(inputNode, 'input_brain', write_composite_xfm, 'input_image') + wf.connect(inputNode, "input_brain", write_composite_xfm, "input_image") - wf.connect(inputNode, 'reference_brain', - write_composite_xfm, 'reference_image') + wf.connect(inputNode, "reference_brain", write_composite_xfm, "reference_image") - wf.connect(inputNode, 'interpolation', - write_composite_xfm, 'interpolation') + wf.connect(inputNode, "interpolation", write_composite_xfm, "interpolation") write_composite_xfm.inputs.input_image_type = 0 write_composite_xfm.inputs.dimension = 3 - collect_all_transforms = pe.Node(util.Merge(4), - name=f'collect_all_transforms' - f'{symm}') + collect_all_transforms = pe.Node( + util.Merge(4), name=f"collect_all_transforms" f"{symm}" + ) - wf.connect(ants_reg_anat_mni, 'outputspec.warp_field', - collect_all_transforms, 'in1') + wf.connect( + ants_reg_anat_mni, "outputspec.warp_field", collect_all_transforms, "in1" + ) - wf.connect(ants_reg_anat_mni, 'outputspec.ants_affine_xfm', - collect_all_transforms, 'in2') + wf.connect( + ants_reg_anat_mni, "outputspec.ants_affine_xfm", collect_all_transforms, "in2" + ) - wf.connect(ants_reg_anat_mni, 'outputspec.ants_rigid_xfm', - collect_all_transforms, 'in3') + 
wf.connect( + ants_reg_anat_mni, "outputspec.ants_rigid_xfm", collect_all_transforms, "in3" + ) - wf.connect(ants_reg_anat_mni, 'outputspec.ants_initial_xfm', - collect_all_transforms, 'in4') + wf.connect( + ants_reg_anat_mni, "outputspec.ants_initial_xfm", collect_all_transforms, "in4" + ) # check transform list to exclude Nonetype (missing) init/rig/affine check_all_transform = pe.Node( - util.Function(input_names=['transform_list'], - output_names=['checked_transform_list', - 'list_length'], - function=check_transforms), - name=f'check_all_transforms') + util.Function( + input_names=["transform_list"], + output_names=["checked_transform_list", "list_length"], + function=check_transforms, + ), + name="check_all_transforms", + ) - wf.connect(collect_all_transforms, 'out', - check_all_transform, 'transform_list') + wf.connect(collect_all_transforms, "out", check_all_transform, "transform_list") - wf.connect(check_all_transform, 'checked_transform_list', - write_composite_xfm, 'transforms') + wf.connect( + check_all_transform, "checked_transform_list", write_composite_xfm, "transforms" + ) # combine ALL xfm's into one - makes it easier downstream write_composite_inv_xfm = pe.Node( interface=ants.ApplyTransforms(), - name=f'write_composite_inv_{symm}xfm', + name=f"write_composite_inv_{symm}xfm", mem_gb=0.3, - mem_x=(6278549929741219 / 604462909807314587353088, 'input_image')) + mem_x=(6278549929741219 / 604462909807314587353088, "input_image"), + ) write_composite_inv_xfm.inputs.print_out_composite_warp_file = True - write_composite_inv_xfm.inputs.output_image = \ + write_composite_inv_xfm.inputs.output_image = ( f"from-{sym}{tmpl}template_to-{orig}_mode-image_xfm.nii.gz" + ) - wf.connect(inputNode, 'reference_brain', - write_composite_inv_xfm, 'input_image') + wf.connect(inputNode, "reference_brain", write_composite_inv_xfm, "input_image") - wf.connect(inputNode, 'input_brain', - write_composite_inv_xfm, 'reference_image') + wf.connect(inputNode, "input_brain", write_composite_inv_xfm, "reference_image") - wf.connect(inputNode, 'interpolation', - write_composite_inv_xfm, 'interpolation') + wf.connect(inputNode, "interpolation", write_composite_inv_xfm, "interpolation") write_composite_inv_xfm.inputs.input_image_type = 0 write_composite_inv_xfm.inputs.dimension = 3 - collect_all_inv_transforms = pe.Node(util.Merge(4), - name=f'collect_all_inv_transforms' - f'{symm}') + collect_all_inv_transforms = pe.Node( + util.Merge(4), name=f"collect_all_inv_transforms" f"{symm}" + ) - wf.connect(ants_reg_anat_mni, 'outputspec.ants_initial_xfm', - collect_all_inv_transforms, 'in1') + wf.connect( + ants_reg_anat_mni, + "outputspec.ants_initial_xfm", + collect_all_inv_transforms, + "in1", + ) - wf.connect(ants_reg_anat_mni, 'outputspec.ants_rigid_xfm', - collect_all_inv_transforms, 'in2') + wf.connect( + ants_reg_anat_mni, + "outputspec.ants_rigid_xfm", + collect_all_inv_transforms, + "in2", + ) - wf.connect(ants_reg_anat_mni, 'outputspec.ants_affine_xfm', - collect_all_inv_transforms, 'in3') + wf.connect( + ants_reg_anat_mni, + "outputspec.ants_affine_xfm", + collect_all_inv_transforms, + "in3", + ) - wf.connect(ants_reg_anat_mni, 'outputspec.inverse_warp_field', - collect_all_inv_transforms, 'in4') + wf.connect( + ants_reg_anat_mni, + "outputspec.inverse_warp_field", + collect_all_inv_transforms, + "in4", + ) # check transform list to exclude Nonetype (missing) init/rig/affine check_all_inv_transform = pe.Node( - util.Function(input_names=['transform_list'], - output_names=['checked_transform_list', - 
'list_length'], - function=check_transforms), - name=f'check_all_inv_transforms') + util.Function( + input_names=["transform_list"], + output_names=["checked_transform_list", "list_length"], + function=check_transforms, + ), + name="check_all_inv_transforms", + ) - wf.connect(collect_all_inv_transforms, 'out', - check_all_inv_transform, 'transform_list') + wf.connect( + collect_all_inv_transforms, "out", check_all_inv_transform, "transform_list" + ) - wf.connect(check_all_inv_transform, 'checked_transform_list', - write_composite_inv_xfm, 'transforms') + wf.connect( + check_all_inv_transform, + "checked_transform_list", + write_composite_inv_xfm, + "transforms", + ) # generate inverse transform flags, which depends on the # number of transforms inverse_all_transform_flags = pe.Node( - util.Function(input_names=['transform_list'], - output_names=['inverse_transform_flags'], - function=generate_inverse_transform_flags), - name=f'inverse_all_transform_flags') + util.Function( + input_names=["transform_list"], + output_names=["inverse_transform_flags"], + function=generate_inverse_transform_flags, + ), + name="inverse_all_transform_flags", + ) - wf.connect(check_all_inv_transform, 'checked_transform_list', - inverse_all_transform_flags, 'transform_list') + wf.connect( + check_all_inv_transform, + "checked_transform_list", + inverse_all_transform_flags, + "transform_list", + ) - wf.connect(inverse_all_transform_flags, 'inverse_transform_flags', - write_composite_inv_xfm, 'invert_transform_flags') + wf.connect( + inverse_all_transform_flags, + "inverse_transform_flags", + write_composite_inv_xfm, + "invert_transform_flags", + ) outputs = { - f'space-{sym}template_desc-preproc_{orig}': ( - ants_reg_anat_mni, 'outputspec.normalized_output_brain'), - f'from-{orig}_to-{sym}{tmpl}template_mode-image_xfm': ( - write_composite_xfm, 'output_image'), - f'from-{sym}{tmpl}template_to-{orig}_mode-image_xfm': ( - write_composite_inv_xfm, 'output_image'), - f'from-{orig}_to-{sym}{tmpl}template_mode-image_desc-linear_xfm': ( - write_composite_linear_xfm, 'output_image'), - f'from-{sym}{tmpl}template_to-{orig}_mode-image_desc-linear_xfm': ( - write_composite_invlinear_xfm, 'output_image'), - f'from-{orig}_to-{sym}{tmpl}template_mode-image_desc-nonlinear_xfm': ( - ants_reg_anat_mni, 'outputspec.warp_field'), - f'from-{sym}{tmpl}template_to-{orig}_mode-image_desc-nonlinear_xfm': ( - ants_reg_anat_mni, 'outputspec.inverse_warp_field') + f"space-{sym}template_desc-preproc_{orig}": ( + ants_reg_anat_mni, + "outputspec.normalized_output_brain", + ), + f"from-{orig}_to-{sym}{tmpl}template_mode-image_xfm": ( + write_composite_xfm, + "output_image", + ), + f"from-{sym}{tmpl}template_to-{orig}_mode-image_xfm": ( + write_composite_inv_xfm, + "output_image", + ), + f"from-{orig}_to-{sym}{tmpl}template_mode-image_desc-linear_xfm": ( + write_composite_linear_xfm, + "output_image", + ), + f"from-{sym}{tmpl}template_to-{orig}_mode-image_desc-linear_xfm": ( + write_composite_invlinear_xfm, + "output_image", + ), + f"from-{orig}_to-{sym}{tmpl}template_mode-image_desc-nonlinear_xfm": ( + ants_reg_anat_mni, + "outputspec.warp_field", + ), + f"from-{sym}{tmpl}template_to-{orig}_mode-image_desc-nonlinear_xfm": ( + ants_reg_anat_mni, + "outputspec.inverse_warp_field", + ), } return (wf, outputs) -def bold_to_T1template_xfm_connector(wf_name, cfg, reg_tool, symmetric=False, - blip=False): - +def bold_to_T1template_xfm_connector( + wf_name, cfg, reg_tool, symmetric=False, blip=False +): wf = pe.Workflow(name=wf_name) inputNode = 
pe.Node( - util.IdentityInterface(fields=['input_brain', - 'mean_bold', - 'coreg_xfm', - 'T1w-brain-template_funcreg', - 'T1w_to_template_xfm', - 'template_to_T1w_xfm', - 'blip_warp']), - name='inputspec') - - sym = '' + util.IdentityInterface( + fields=[ + "input_brain", + "mean_bold", + "coreg_xfm", + "T1w-brain-template_funcreg", + "T1w_to_template_xfm", + "template_to_T1w_xfm", + "blip_warp", + ] + ), + name="inputspec", + ) + + sym = "" if symmetric: - sym = 'sym' + sym = "sym" - if reg_tool == 'ants': - fsl_reg_2_itk = pe.Node(c3.C3dAffineTool(), name='fsl_reg_2_itk') + if reg_tool == "ants": + fsl_reg_2_itk = pe.Node(c3.C3dAffineTool(), name="fsl_reg_2_itk") fsl_reg_2_itk.inputs.itk_transform = True fsl_reg_2_itk.inputs.fsl2ras = True # convert the .mat from linear Func->Anat to # ANTS format - wf.connect(inputNode, 'coreg_xfm', fsl_reg_2_itk, 'transform_file') + wf.connect(inputNode, "coreg_xfm", fsl_reg_2_itk, "transform_file") - wf.connect(inputNode, 'input_brain', fsl_reg_2_itk, 'reference_file') + wf.connect(inputNode, "input_brain", fsl_reg_2_itk, "reference_file") - wf.connect(inputNode, 'mean_bold', fsl_reg_2_itk, 'source_file') + wf.connect(inputNode, "mean_bold", fsl_reg_2_itk, "source_file") - itk_imports = ['import os'] - change_transform = pe.Node(util.Function( - input_names=['input_affine_file'], - output_names=['updated_affine_file'], - function=change_itk_transform_type, - imports=itk_imports), - name='change_transform_type') + itk_imports = ["import os"] + change_transform = pe.Node( + util.Function( + input_names=["input_affine_file"], + output_names=["updated_affine_file"], + function=change_itk_transform_type, + imports=itk_imports, + ), + name="change_transform_type", + ) - wf.connect(fsl_reg_2_itk, 'itk_transform', - change_transform, 'input_affine_file') + wf.connect( + fsl_reg_2_itk, "itk_transform", change_transform, "input_affine_file" + ) # combine ALL xfm's into one - makes it easier downstream write_composite_xfm = pe.Node( - interface=ants.ApplyTransforms(), - name=f'write_composite_xfm', - mem_gb=1.5) + interface=ants.ApplyTransforms(), name="write_composite_xfm", mem_gb=1.5 + ) write_composite_xfm.inputs.print_out_composite_warp_file = True - write_composite_xfm.inputs.output_image = \ + write_composite_xfm.inputs.output_image = ( f"from-bold_to-{sym}template_mode-image_xfm.nii.gz" + ) - wf.connect(inputNode, 'mean_bold', - write_composite_xfm, 'input_image') + wf.connect(inputNode, "mean_bold", write_composite_xfm, "input_image") - wf.connect(inputNode, 'T1w-brain-template_funcreg', - write_composite_xfm, 'reference_image') + wf.connect( + inputNode, + "T1w-brain-template_funcreg", + write_composite_xfm, + "reference_image", + ) write_composite_xfm.inputs.input_image_type = 0 write_composite_xfm.inputs.dimension = 3 - write_composite_xfm.inputs.interpolation = \ - cfg.registration_workflows['anatomical_registration'][ - 'registration']['ANTs']['interpolation'] + write_composite_xfm.inputs.interpolation = cfg.registration_workflows[ + "anatomical_registration" + ]["registration"]["ANTs"]["interpolation"] if not blip: - collect_all_transforms = pe.Node(util.Merge(2), - name='collect_all_transforms') + collect_all_transforms = pe.Node( + util.Merge(2), name="collect_all_transforms" + ) else: - collect_all_transforms = pe.Node(util.Merge(3), - name='collect_all_transforms') + collect_all_transforms = pe.Node( + util.Merge(3), name="collect_all_transforms" + ) - wf.connect(inputNode, 'blip_warp', - collect_all_transforms, 'in3') + wf.connect(inputNode, 
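FLIRT writes its affine in an FSL-specific convention that ANTs cannot read directly. The C3dAffineTool node configured above (fsl2ras=True, itk_transform=True) corresponds to a command along the lines of

    c3d_affine_tool -ref T1w_brain.nii.gz -src mean_bold.nii.gz coreg.mat \
        -fsl2ras -oitk coreg_itk.txt

(paths illustrative), and change_itk_transform_type then rewrites the transform-class header so antsApplyTransforms accepts the file. An approximate sketch of that helper:

    import os

    def change_itk_transform_type(input_affine_file):
        # Rewrite the ITK transform class so ANTs reads the converted
        # affine as an AffineTransform (approximation of the helper).
        updated_affine_file = os.path.join(os.getcwd(), "updated_affine.txt")
        with open(input_affine_file) as fin, \
                open(updated_affine_file, "w") as fout:
            for line in fin:
                fout.write(line.replace(
                    "MatrixOffsetTransformBase_double_3_3",
                    "AffineTransform_double_3_3",
                ))
        return updated_affine_file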
"blip_warp", collect_all_transforms, "in3") - wf.connect(inputNode, 'T1w_to_template_xfm', - collect_all_transforms, 'in1') + wf.connect(inputNode, "T1w_to_template_xfm", collect_all_transforms, "in1") - wf.connect(change_transform, 'updated_affine_file', - collect_all_transforms, 'in2') + wf.connect( + change_transform, "updated_affine_file", collect_all_transforms, "in2" + ) - wf.connect(collect_all_transforms, 'out', - write_composite_xfm, 'transforms') + wf.connect(collect_all_transforms, "out", write_composite_xfm, "transforms") write_composite_inv_xfm = pe.Node( - interface=ants.ApplyTransforms(), - name=f'write_composite_inv_xfm', - mem_gb=1.5) + interface=ants.ApplyTransforms(), name="write_composite_inv_xfm", mem_gb=1.5 + ) write_composite_inv_xfm.inputs.print_out_composite_warp_file = True write_composite_inv_xfm.inputs.invert_transform_flags = [True, False] - write_composite_inv_xfm.inputs.output_image = \ + write_composite_inv_xfm.inputs.output_image = ( f"from-{sym}template_to-bold_mode-image_xfm.nii.gz" + ) - wf.connect(inputNode, 'T1w-brain-template_funcreg', - write_composite_inv_xfm, 'input_image') + wf.connect( + inputNode, + "T1w-brain-template_funcreg", + write_composite_inv_xfm, + "input_image", + ) - wf.connect(inputNode, 'mean_bold', - write_composite_inv_xfm, 'reference_image') + wf.connect(inputNode, "mean_bold", write_composite_inv_xfm, "reference_image") write_composite_inv_xfm.inputs.input_image_type = 0 write_composite_inv_xfm.inputs.dimension = 3 - write_composite_inv_xfm.inputs.interpolation = \ - cfg.registration_workflows['anatomical_registration'][ - 'registration']['ANTs']['interpolation'] + write_composite_inv_xfm.inputs.interpolation = cfg.registration_workflows[ + "anatomical_registration" + ]["registration"]["ANTs"]["interpolation"] - collect_inv_transforms = pe.Node(util.Merge(2), - name='collect_inv_transforms') + collect_inv_transforms = pe.Node(util.Merge(2), name="collect_inv_transforms") - wf.connect(change_transform, 'updated_affine_file', - collect_inv_transforms, 'in1') + wf.connect( + change_transform, "updated_affine_file", collect_inv_transforms, "in1" + ) - wf.connect(inputNode, 'template_to_T1w_xfm', - collect_inv_transforms, 'in2') + wf.connect(inputNode, "template_to_T1w_xfm", collect_inv_transforms, "in2") - wf.connect(collect_inv_transforms, 'out', - write_composite_inv_xfm, 'transforms') + wf.connect(collect_inv_transforms, "out", write_composite_inv_xfm, "transforms") outputs = { - f'from-bold_to-{sym}template_mode-image_xfm': - (write_composite_xfm, 'output_image'), - f'from-{sym}template_to-bold_mode-image_xfm': - (write_composite_inv_xfm, 'output_image') + f"from-bold_to-{sym}template_mode-image_xfm": ( + write_composite_xfm, + "output_image", + ), + f"from-{sym}template_to-bold_mode-image_xfm": ( + write_composite_inv_xfm, + "output_image", + ), } - elif reg_tool == 'fsl': - - write_composite_xfm = pe.Node(interface=fsl.ConvertWarp(), - name='combine_fsl_warps') + elif reg_tool == "fsl": + write_composite_xfm = pe.Node( + interface=fsl.ConvertWarp(), name="combine_fsl_warps" + ) - wf.connect(inputNode, 'T1w-brain-template_funcreg', - write_composite_xfm, 'reference') + wf.connect( + inputNode, "T1w-brain-template_funcreg", write_composite_xfm, "reference" + ) if blip: - wf.connect(inputNode, 'coreg_xfm', - write_composite_xfm, 'postmat') - wf.connect(inputNode, 'blip_warp', - write_composite_xfm, 'warp1') - wf.connect(inputNode, 'T1w_to_template_xfm', - write_composite_xfm, 'warp2') + wf.connect(inputNode, "coreg_xfm", 
write_composite_xfm, "postmat") + wf.connect(inputNode, "blip_warp", write_composite_xfm, "warp1") + wf.connect(inputNode, "T1w_to_template_xfm", write_composite_xfm, "warp2") else: - wf.connect(inputNode, 'coreg_xfm', - write_composite_xfm, 'premat') - wf.connect(inputNode, 'T1w_to_template_xfm', - write_composite_xfm, 'warp1') + wf.connect(inputNode, "coreg_xfm", write_composite_xfm, "premat") + wf.connect(inputNode, "T1w_to_template_xfm", write_composite_xfm, "warp1") outputs = { - f'from-bold_to-{sym}template_mode-image_xfm': - (write_composite_xfm, 'out_file'), + f"from-bold_to-{sym}template_mode-image_xfm": ( + write_composite_xfm, + "out_file", + ), } return (wf, outputs) @@ -1976,10 +2239,8 @@ def bold_to_T1template_xfm_connector(wf_name, cfg, reg_tool, symmetric=False, "space-template_desc-head_T1w": {"Template": "T1w-template"}, "space-template_desc-T1w_mask": {"Template": "T1w-template"}, "space-template_desc-T1wT2w_biasfield": {"Template": "T1w-template"}, - "from-T1w_to-template_mode-image_desc-linear_xfm": { - "Template": "T1w-template"}, - "from-template_to-T1w_mode-image_desc-linear_xfm": { - "Template": "T1w-template"}, + "from-T1w_to-template_mode-image_desc-linear_xfm": {"Template": "T1w-template"}, + "from-template_to-T1w_mode-image_desc-linear_xfm": {"Template": "T1w-template"}, "from-T1w_to-template_mode-image_xfm": {"Template": "T1w-template"}, "from-T1w_to-template_mode-image_warp": {"Template": "T1w-template"}, "from-longitudinal_to-template_mode-image_desc-linear_xfm": { @@ -1988,52 +2249,50 @@ def bold_to_T1template_xfm_connector(wf_name, cfg, reg_tool, symmetric=False, "from-template_to-longitudinal_mode-image_desc-linear_xfm": { "Template": "T1w-template" }, - "from-longitudinal_to-template_mode-image_xfm": { - "Template": "T1w-template"}, + "from-longitudinal_to-template_mode-image_xfm": {"Template": "T1w-template"}, }, ) def register_FSL_anat_to_template(wf, cfg, strat_pool, pipe_num, opt=None): - - fsl, outputs = FSL_registration_connector(f'register_{opt}_anat_to_' - f'template_{pipe_num}', cfg, - orig='T1w', opt=opt) + fsl, outputs = FSL_registration_connector( + f"register_{opt}_anat_to_" f"template_{pipe_num}", cfg, orig="T1w", opt=opt + ) fsl.inputs.inputspec.interpolation = cfg.registration_workflows[ - 'anatomical_registration']['registration']['FSL-FNIRT'][ - 'interpolation'] + "anatomical_registration" + ]["registration"]["FSL-FNIRT"]["interpolation"] fsl.inputs.inputspec.fnirt_config = cfg.registration_workflows[ - 'anatomical_registration']['registration']['FSL-FNIRT'][ - 'fnirt_config'] + "anatomical_registration" + ]["registration"]["FSL-FNIRT"]["fnirt_config"] - connect, brain = \ - strat_pool.get_data(['desc-brain_T1w', - 'space-longitudinal_desc-brain_T1w'], - report_fetched=True) + connect, brain = strat_pool.get_data( + ["desc-brain_T1w", "space-longitudinal_desc-brain_T1w"], report_fetched=True + ) node, out = connect - wf.connect(node, out, fsl, 'inputspec.input_brain') + wf.connect(node, out, fsl, "inputspec.input_brain") - node, out = strat_pool.get_data('T1w-brain-template') - wf.connect(node, out, fsl, 'inputspec.reference_brain') + node, out = strat_pool.get_data("T1w-brain-template") + wf.connect(node, out, fsl, "inputspec.reference_brain") - node, out = strat_pool.get_data('T1w-template') - wf.connect(node, out, fsl, 'inputspec.reference_head') + node, out = strat_pool.get_data("T1w-template") + wf.connect(node, out, fsl, "inputspec.reference_head") - node, out = strat_pool.get_data(["desc-preproc_T1w", - 
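The FSL branch above relies on convertwarp, which composes its inputs in the fixed order premat, warp1, warp2, postmat. With illustrative filenames, the two wirings are equivalent to:

    # without a blip (distortion-correction) warp
    convertwarp --ref=T1w_brain_template_funcreg.nii.gz \
        --premat=from-bold_to-T1w.mat \
        --warp1=from-T1w_to-template_warp.nii.gz \
        --out=from-bold_to-template_mode-image_xfm.nii.gz

    # with a blip warp, matching the postmat/warp1/warp2 wiring above
    convertwarp --ref=T1w_brain_template_funcreg.nii.gz \
        --warp1=blip_warp.nii.gz \
        --warp2=from-T1w_to-template_warp.nii.gz \
        --postmat=from-bold_to-T1w.mat \
        --out=from-bold_to-template_mode-image_xfm.nii.gz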
"space-longitudinal_desc-reorient_T1w"]) - wf.connect(node, out, fsl, 'inputspec.input_head') + node, out = strat_pool.get_data( + ["desc-preproc_T1w", "space-longitudinal_desc-reorient_T1w"] + ) + wf.connect(node, out, fsl, "inputspec.input_head") - node, out = strat_pool.get_data('template-ref-mask') - wf.connect(node, out, fsl, 'inputspec.reference_mask') + node, out = strat_pool.get_data("template-ref-mask") + wf.connect(node, out, fsl, "inputspec.reference_mask") - if 'space-longitudinal' in brain: + if "space-longitudinal" in brain: for key in outputs.keys(): - if 'from-T1w' in key: - new_key = key.replace('from-T1w', 'from-longitudinal') + if "from-T1w" in key: + new_key = key.replace("from-T1w", "from-longitudinal") outputs[new_key] = outputs[key] del outputs[key] - if 'to-T1w' in key: - new_key = key.replace('to-T1w', 'to-longitudinal') + if "to-T1w" in key: + new_key = key.replace("to-T1w", "to-longitudinal") outputs[new_key] = outputs[key] del outputs[key] @@ -2079,50 +2338,51 @@ def register_FSL_anat_to_template(wf, cfg, strat_pool, pipe_num, opt=None): }, }, ) -def register_symmetric_FSL_anat_to_template(wf, cfg, strat_pool, pipe_num, - opt=None): - - fsl, outputs = FSL_registration_connector(f'register_{opt}_anat_to_' - f'template_symmetric_' - f'{pipe_num}', cfg, orig='T1w', - opt=opt, symmetric=True) +def register_symmetric_FSL_anat_to_template(wf, cfg, strat_pool, pipe_num, opt=None): + fsl, outputs = FSL_registration_connector( + f"register_{opt}_anat_to_" f"template_symmetric_" f"{pipe_num}", + cfg, + orig="T1w", + opt=opt, + symmetric=True, + ) fsl.inputs.inputspec.interpolation = cfg.registration_workflows[ - 'anatomical_registration']['registration']['FSL-FNIRT'][ - 'interpolation'] + "anatomical_registration" + ]["registration"]["FSL-FNIRT"]["interpolation"] fsl.inputs.inputspec.fnirt_config = cfg.registration_workflows[ - 'anatomical_registration']['registration']['FSL-FNIRT'][ - 'fnirt_config'] + "anatomical_registration" + ]["registration"]["FSL-FNIRT"]["fnirt_config"] - connect, brain = \ - strat_pool.get_data(['desc-brain_T1w', - 'space-longitudinal_desc-brain_T1w'], - report_fetched=True) + connect, brain = strat_pool.get_data( + ["desc-brain_T1w", "space-longitudinal_desc-brain_T1w"], report_fetched=True + ) node, out = connect - wf.connect(node, out, fsl, 'inputspec.input_brain') + wf.connect(node, out, fsl, "inputspec.input_brain") - node, out = strat_pool.get_data('T1w-brain-template-symmetric') - wf.connect(node, out, fsl, 'inputspec.reference_brain') + node, out = strat_pool.get_data("T1w-brain-template-symmetric") + wf.connect(node, out, fsl, "inputspec.reference_brain") - node, out = strat_pool.get_data(["desc-preproc_T1w", - "space-longitudinal_desc-reorient_T1w"]) - wf.connect(node, out, fsl, 'inputspec.input_head') + node, out = strat_pool.get_data( + ["desc-preproc_T1w", "space-longitudinal_desc-reorient_T1w"] + ) + wf.connect(node, out, fsl, "inputspec.input_head") - node, out = strat_pool.get_data('T1w-template-symmetric') - wf.connect(node, out, fsl, 'inputspec.reference_head') + node, out = strat_pool.get_data("T1w-template-symmetric") + wf.connect(node, out, fsl, "inputspec.reference_head") - node, out = strat_pool.get_data('dilated-symmetric-brain-mask') - wf.connect(node, out, fsl, 'inputspec.reference_mask') + node, out = strat_pool.get_data("dilated-symmetric-brain-mask") + wf.connect(node, out, fsl, "inputspec.reference_mask") - if 'space-longitudinal' in brain: + if "space-longitudinal" in brain: for key in outputs.keys(): - if 'from-T1w' 
in key: - new_key = key.replace('from-T1w', 'from-longitudinal') + if "from-T1w" in key: + new_key = key.replace("from-T1w", "from-longitudinal") outputs[new_key] = outputs[key] del outputs[key] - if 'to-T1w' in key: - new_key = key.replace('to-T1w', 'to-longitudinal') + if "to-T1w" in key: + new_key = key.replace("to-T1w", "to-longitudinal") outputs[new_key] = outputs[key] del outputs[key] @@ -2131,8 +2391,7 @@ def register_symmetric_FSL_anat_to_template(wf, cfg, strat_pool, pipe_num, @nodeblock( name="register_FSL_EPI_to_template", - config=["registration_workflows", "functional_registration", - "EPI_registration"], + config=["registration_workflows", "functional_registration", "EPI_registration"], switch=["run"], option_key="using", option_val=["FSL", "FSL-linear"], @@ -2149,42 +2408,43 @@ def register_symmetric_FSL_anat_to_template(wf, cfg, strat_pool, pipe_num, "from-EPItemplate_to-bold_mode-image_desc-linear_xfm": { "Template": "EPI-template" }, - "from-bold_to-EPItemplate_mode-image_xfm": { - "Template": "EPI-template"}, + "from-bold_to-EPItemplate_mode-image_xfm": {"Template": "EPI-template"}, }, ) def register_FSL_EPI_to_template(wf, cfg, strat_pool, pipe_num, opt=None): - '''Directly register the mean functional to an EPI template. No T1w + """Directly register the mean functional to an EPI template. No T1w involved. - ''' - - fsl, outputs = FSL_registration_connector(f'register_{opt}_EPI_to_' - f'template_{pipe_num}', cfg, - orig='bold', opt=opt, - template='EPI') + """ + fsl, outputs = FSL_registration_connector( + f"register_{opt}_EPI_to_" f"template_{pipe_num}", + cfg, + orig="bold", + opt=opt, + template="EPI", + ) - fsl.inputs.inputspec.interpolation = cfg['registration_workflows'][ - 'functional_registration']['EPI_registration']['FSL-FNIRT'][ - 'interpolation'] + fsl.inputs.inputspec.interpolation = cfg["registration_workflows"][ + "functional_registration" + ]["EPI_registration"]["FSL-FNIRT"]["interpolation"] - fsl.inputs.inputspec.fnirt_config = cfg['registration_workflows'][ - 'functional_registration']['EPI_registration']['FSL-FNIRT'][ - 'fnirt_config'] + fsl.inputs.inputspec.fnirt_config = cfg["registration_workflows"][ + "functional_registration" + ]["EPI_registration"]["FSL-FNIRT"]["fnirt_config"] - node, out = strat_pool.get_data('sbref') - wf.connect(node, out, fsl, 'inputspec.input_brain') + node, out = strat_pool.get_data("sbref") + wf.connect(node, out, fsl, "inputspec.input_brain") - node, out = strat_pool.get_data('EPI-template') - wf.connect(node, out, fsl, 'inputspec.reference_brain') + node, out = strat_pool.get_data("EPI-template") + wf.connect(node, out, fsl, "inputspec.reference_brain") - node, out = strat_pool.get_data('sbref') - wf.connect(node, out, fsl, 'inputspec.input_head') + node, out = strat_pool.get_data("sbref") + wf.connect(node, out, fsl, "inputspec.input_head") - node, out = strat_pool.get_data('EPI-template') - wf.connect(node, out, fsl, 'inputspec.reference_head') + node, out = strat_pool.get_data("EPI-template") + wf.connect(node, out, fsl, "inputspec.reference_head") - node, out = strat_pool.get_data('EPI-template-mask') - wf.connect(node, out, fsl, 'inputspec.reference_mask') + node, out = strat_pool.get_data("EPI-template-mask") + wf.connect(node, out, fsl, "inputspec.reference_mask") return (wf, outputs) @@ -2220,130 +2480,141 @@ def register_FSL_EPI_to_template(wf, cfg, strat_pool, pipe_num, opt=None): outputs={ "space-template_desc-preproc_T1w": { "Description": "The preprocessed T1w brain transformed to " - "template space.", + 
"template space.", "Template": "T1w-template", }, "from-T1w_to-template_mode-image_desc-linear_xfm": { "Description": "Linear (affine) transform from T1w native space " - "to T1w-template space.", + "to T1w-template space.", "Template": "T1w-template", }, "from-template_to-T1w_mode-image_desc-linear_xfm": { "Description": "Linear (affine) transform from T1w-template space " - "to T1w native space.", + "to T1w native space.", "Template": "T1w-template", }, "from-T1w_to-template_mode-image_desc-nonlinear_xfm": { "Description": "Nonlinear (warp field) transform from T1w native " - "space to T1w-template space.", + "space to T1w-template space.", "Template": "T1w-template", }, "from-template_to-T1w_mode-image_desc-nonlinear_xfm": { "Description": "Nonlinear (warp field) transform from " - "T1w-template space to T1w native space.", + "T1w-template space to T1w native space.", "Template": "T1w-template", }, "from-T1w_to-template_mode-image_xfm": { "Description": "Composite (affine + warp field) transform from " - "T1w native space to T1w-template space.", + "T1w native space to T1w-template space.", "Template": "T1w-template", }, "from-template_to-T1w_mode-image_xfm": { "Description": "Composite (affine + warp field) transform from " - "T1w-template space to T1w native space.", + "T1w-template space to T1w native space.", "Template": "T1w-template", }, "from-longitudinal_to-template_mode-image_desc-linear_xfm": { "Description": "Linear (affine) transform from " - "longitudinal-template space to T1w-template " - "space.", + "longitudinal-template space to T1w-template " + "space.", "Template": "T1w-template", }, "from-template_to-longitudinal_mode-image_desc-linear_xfm": { "Description": "Linear (affine) transform from T1w-template " - "space to longitudinal-template space.", + "space to longitudinal-template space.", "Template": "T1w-template", }, "from-longitudinal_to-template_mode-image_desc-nonlinear_xfm": { "Description": "Nonlinear (warp field) transform from " - "longitudinal-template space to T1w-template " - "space.", + "longitudinal-template space to T1w-template " + "space.", "Template": "T1w-template", }, "from-template_to-longitudinal_mode-image_desc-nonlinear_xfm": { "Description": "Nonlinear (warp field) transform from " - "T1w-template space to longitudinal-template " - "space.", + "T1w-template space to longitudinal-template " + "space.", "Template": "T1w-template", }, "from-longitudinal_to-template_mode-image_xfm": { "Description": "Composite (affine + warp field) transform from " - "longitudinal-template space to T1w-template " - "space.", + "longitudinal-template space to T1w-template " + "space.", "Template": "T1w-template", }, "from-template_to-longitudinal_mode-image_xfm": { "Description": "Composite (affine + warp field) transform from " - "T1w-template space to longitudinal-template " - "space.", + "T1w-template space to longitudinal-template " + "space.", "Template": "T1w-template", }, }, ) def register_ANTs_anat_to_template(wf, cfg, strat_pool, pipe_num, opt=None): + params = cfg.registration_workflows["anatomical_registration"]["registration"][ + "ANTs" + ]["T1_registration"] - params = cfg.registration_workflows['anatomical_registration'][ - 'registration']['ANTs']['T1_registration'] - - ants_rc, outputs = ANTs_registration_connector('ANTS_T1_to_template_' - f'{pipe_num}', cfg, - params, orig='T1w') + ants_rc, outputs = ANTs_registration_connector( + "ANTS_T1_to_template_" f"{pipe_num}", cfg, params, orig="T1w" + ) ants_rc.inputs.inputspec.interpolation = 
cfg.registration_workflows[ - 'anatomical_registration']['registration']['ANTs']['interpolation'] + "anatomical_registration" + ]["registration"]["ANTs"]["interpolation"] - connect, brain = \ - strat_pool.get_data(['desc-preproc_T1w', - 'space-longitudinal_desc-brain_T1w'], - report_fetched=True) + connect, brain = strat_pool.get_data( + ["desc-preproc_T1w", "space-longitudinal_desc-brain_T1w"], report_fetched=True + ) node, out = connect - wf.connect(node, out, ants_rc, 'inputspec.input_brain') - - t1w_brain_template = strat_pool.node_data('T1w-brain-template') - wf.connect(t1w_brain_template.node, t1w_brain_template.out, - ants_rc, 'inputspec.reference_brain') + wf.connect(node, out, ants_rc, "inputspec.input_brain") + + t1w_brain_template = strat_pool.node_data("T1w-brain-template") + wf.connect( + t1w_brain_template.node, + t1w_brain_template.out, + ants_rc, + "inputspec.reference_brain", + ) # TODO check the order of T1w - node, out = strat_pool.get_data(["desc-restore_T1w", "desc-head_T1w", - "desc-preproc_T1w", - "space-longitudinal_desc-reorient_T1w"]) - wf.connect(node, out, ants_rc, 'inputspec.input_head') - + node, out = strat_pool.get_data( + [ + "desc-restore_T1w", + "desc-head_T1w", + "desc-preproc_T1w", + "space-longitudinal_desc-reorient_T1w", + ] + ) + wf.connect(node, out, ants_rc, "inputspec.input_head") - t1w_template = strat_pool.node_data('T1w-template') - wf.connect(t1w_template.node, t1w_template.out, - ants_rc, 'inputspec.reference_head') + t1w_template = strat_pool.node_data("T1w-template") + wf.connect(t1w_template.node, t1w_template.out, ants_rc, "inputspec.reference_head") - brain_mask = strat_pool.node_data(["space-T1w_desc-brain_mask", - "space-longitudinal_desc-brain_mask", - "space-T1w_desc-acpcbrain_mask"]) - wf.connect(brain_mask.node, brain_mask.out, - ants_rc, 'inputspec.input_mask') + brain_mask = strat_pool.node_data( + [ + "space-T1w_desc-brain_mask", + "space-longitudinal_desc-brain_mask", + "space-T1w_desc-acpcbrain_mask", + ] + ) + wf.connect(brain_mask.node, brain_mask.out, ants_rc, "inputspec.input_mask") - if strat_pool.check_rpool('T1w-brain-template-mask'): - node, out = strat_pool.get_data('T1w-brain-template-mask') - wf.connect(node, out, ants_rc, 'inputspec.reference_mask') + if strat_pool.check_rpool("T1w-brain-template-mask"): + node, out = strat_pool.get_data("T1w-brain-template-mask") + wf.connect(node, out, ants_rc, "inputspec.reference_mask") - if strat_pool.check_rpool('label-lesion_mask'): - node, out = strat_pool.get_data('label-lesion_mask') - wf.connect(node, out, ants_rc, 'inputspec.lesion_mask') + if strat_pool.check_rpool("label-lesion_mask"): + node, out = strat_pool.get_data("label-lesion_mask") + wf.connect(node, out, ants_rc, "inputspec.lesion_mask") - if 'space-longitudinal' in brain: + if "space-longitudinal" in brain: for key in outputs: - for direction in ['from', 'to']: - if f'{direction}-T1w' in key: - new_key = key.replace(f'{direction}-T1w', - f'{direction}-longitudinal') + for direction in ["from", "to"]: + if f"{direction}-T1w" in key: + new_key = key.replace( + f"{direction}-T1w", f"{direction}-longitudinal" + ) outputs[new_key] = outputs[key] del outputs[key] return (wf, outputs) @@ -2358,8 +2629,7 @@ def register_ANTs_anat_to_template(wf, cfg, strat_pool, pipe_num, opt=None): inputs=[ ( ["desc-preproc_T1w", "space-longitudinal_desc-brain_T1w"], - ["space-T1w_desc-brain_mask", - "space-longitudinal_desc-brain_mask"], + ["space-T1w_desc-brain_mask", "space-longitudinal_desc-brain_mask"], [ "desc-head_T1w", 
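Passing a list to strat_pool.get_data, as the input_head and brain-mask lookups above do, expresses a precedence order: the first resource that exists in the pool wins (e.g. a bias-restored T1w head is preferred over the plain preprocessed one). A hypothetical simplification of that lookup:

    def get_first_available(rpool, keys):
        # Hypothetical sketch of list-style resource lookup: first match
        # in the resource pool wins, mirroring get_data's fallback order.
        for key in keys if isinstance(keys, list) else [keys]:
            if key in rpool:
                return rpool[key]
        raise LookupError(f"None of {keys} found in resource pool")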
"desc-preproc_T1w", @@ -2413,56 +2683,60 @@ def register_ANTs_anat_to_template(wf, cfg, strat_pool, pipe_num, opt=None): }, }, ) -def register_symmetric_ANTs_anat_to_template(wf, cfg, strat_pool, pipe_num, - opt=None): - - params = cfg.registration_workflows['anatomical_registration'][ - 'registration']['ANTs']['T1_registration'] - - ants, outputs = ANTs_registration_connector('ANTS_T1_to_template_' - f'symmetric_{pipe_num}', cfg, - params, orig='T1w', - symmetric=True) +def register_symmetric_ANTs_anat_to_template(wf, cfg, strat_pool, pipe_num, opt=None): + params = cfg.registration_workflows["anatomical_registration"]["registration"][ + "ANTs" + ]["T1_registration"] + + ants, outputs = ANTs_registration_connector( + "ANTS_T1_to_template_" f"symmetric_{pipe_num}", + cfg, + params, + orig="T1w", + symmetric=True, + ) ants.inputs.inputspec.interpolation = cfg.registration_workflows[ - 'anatomical_registration']['registration']['ANTs']['interpolation'] + "anatomical_registration" + ]["registration"]["ANTs"]["interpolation"] - connect, brain = \ - strat_pool.get_data(['desc-preproc_T1w', - 'space-longitudinal_desc-brain_T1w'], - report_fetched=True) + connect, brain = strat_pool.get_data( + ["desc-preproc_T1w", "space-longitudinal_desc-brain_T1w"], report_fetched=True + ) node, out = connect - wf.connect(node, out, ants, 'inputspec.input_brain') + wf.connect(node, out, ants, "inputspec.input_brain") - node, out = strat_pool.get_data('T1w-brain-template-symmetric') - wf.connect(node, out, ants, 'inputspec.reference_brain') + node, out = strat_pool.get_data("T1w-brain-template-symmetric") + wf.connect(node, out, ants, "inputspec.reference_brain") - node, out = strat_pool.get_data(["desc-head_T1w", "desc-preproc_T1w", - "space-longitudinal_desc-reorient_T1w"]) - wf.connect(node, out, ants, 'inputspec.input_head') + node, out = strat_pool.get_data( + ["desc-head_T1w", "desc-preproc_T1w", "space-longitudinal_desc-reorient_T1w"] + ) + wf.connect(node, out, ants, "inputspec.input_head") - node, out = strat_pool.get_data('T1w-template-symmetric') - wf.connect(node, out, ants, 'inputspec.reference_head') + node, out = strat_pool.get_data("T1w-template-symmetric") + wf.connect(node, out, ants, "inputspec.reference_head") - node, out = strat_pool.get_data(["space-T1w_desc-brain_mask", - "space-longitudinal_desc-brain_mask"]) - wf.connect(node, out, ants, 'inputspec.input_mask') + node, out = strat_pool.get_data( + ["space-T1w_desc-brain_mask", "space-longitudinal_desc-brain_mask"] + ) + wf.connect(node, out, ants, "inputspec.input_mask") - node, out = strat_pool.get_data('dilated-symmetric-brain-mask') - wf.connect(node, out, ants, 'inputspec.reference_mask') + node, out = strat_pool.get_data("dilated-symmetric-brain-mask") + wf.connect(node, out, ants, "inputspec.reference_mask") - if strat_pool.check_rpool('label-lesion_mask'): - node, out = strat_pool.get_data('label-lesion_mask') - wf.connect(node, out, ants, 'inputspec.lesion_mask') + if strat_pool.check_rpool("label-lesion_mask"): + node, out = strat_pool.get_data("label-lesion_mask") + wf.connect(node, out, ants, "inputspec.lesion_mask") - if 'space-longitudinal' in brain: + if "space-longitudinal" in brain: for key in outputs.keys(): - if 'from-T1w' in key: - new_key = key.replace('from-T1w', 'from-longitudinal') + if "from-T1w" in key: + new_key = key.replace("from-T1w", "from-longitudinal") outputs[new_key] = outputs[key] del outputs[key] - if 'to-T1w' in key: - new_key = key.replace('to-T1w', 'to-longitudinal') + if "to-T1w" in key: + new_key 
= key.replace("to-T1w", "to-longitudinal") outputs[new_key] = outputs[key] del outputs[key] @@ -2494,45 +2768,48 @@ def register_symmetric_ANTs_anat_to_template(wf, cfg, strat_pool, pipe_num, "from-EPItemplate_to-bold_mode-image_desc-nonlinear_xfm": { "Template": "EPI-template" }, - "from-bold_to-EPItemplate_mode-image_xfm": { - "Template": "EPI-template"}, - "from-EPItemplate_to-bold_mode-image_xfm": { - "Template": "EPI-template"}, + "from-bold_to-EPItemplate_mode-image_xfm": {"Template": "EPI-template"}, + "from-EPItemplate_to-bold_mode-image_xfm": {"Template": "EPI-template"}, }, ) def register_ANTs_EPI_to_template(wf, cfg, strat_pool, pipe_num, opt=None): - '''Directly register the mean functional to an EPI template. No T1w + """Directly register the mean functional to an EPI template. No T1w involved. - ''' - params = cfg.registration_workflows['functional_registration'][ - 'EPI_registration']['ANTs']['parameters'] - - ants, outputs = ANTs_registration_connector('ANTS_bold_to_EPI-template' - f'_{pipe_num}', cfg, params, - orig='bold', template='EPI') + """ + params = cfg.registration_workflows["functional_registration"]["EPI_registration"][ + "ANTs" + ]["parameters"] + + ants, outputs = ANTs_registration_connector( + "ANTS_bold_to_EPI-template" f"_{pipe_num}", + cfg, + params, + orig="bold", + template="EPI", + ) ants.inputs.inputspec.interpolation = cfg.registration_workflows[ - 'functional_registration']['EPI_registration']['ANTs'][ - 'interpolation'] + "functional_registration" + ]["EPI_registration"]["ANTs"]["interpolation"] - node, out = strat_pool.get_data('sbref') - wf.connect(node, out, ants, 'inputspec.input_brain') + node, out = strat_pool.get_data("sbref") + wf.connect(node, out, ants, "inputspec.input_brain") - node, out = strat_pool.get_data('EPI-template') - wf.connect(node, out, ants, 'inputspec.reference_brain') + node, out = strat_pool.get_data("EPI-template") + wf.connect(node, out, ants, "inputspec.reference_brain") - node, out = strat_pool.get_data('sbref') - wf.connect(node, out, ants, 'inputspec.input_head') + node, out = strat_pool.get_data("sbref") + wf.connect(node, out, ants, "inputspec.input_head") - node, out = strat_pool.get_data('EPI-template') - wf.connect(node, out, ants, 'inputspec.reference_head') + node, out = strat_pool.get_data("EPI-template") + wf.connect(node, out, ants, "inputspec.reference_head") - node, out = strat_pool.get_data('space-bold_desc-brain_mask') - wf.connect(node, out, ants, 'inputspec.input_mask') + node, out = strat_pool.get_data("space-bold_desc-brain_mask") + wf.connect(node, out, ants, "inputspec.input_mask") - if strat_pool.check_rpool('EPI-template-mask'): - node, out = strat_pool.get_data('EPI-template-mask') - wf.connect(node, out, ants, 'inputspec.reference_mask') + if strat_pool.check_rpool("EPI-template-mask"): + node, out = strat_pool.get_data("EPI-template-mask") + wf.connect(node, out, ants, "inputspec.reference_mask") return (wf, outputs) @@ -2559,8 +2836,7 @@ def register_ANTs_EPI_to_template(wf, cfg, strat_pool, pipe_num, opt=None): ( "desc-restore-brain_T1w", ["desc-preproc_T1w", "space-longitudinal_desc-brain_T1w"], - ["desc-restore_T1w", "desc-preproc_T1w", "desc-reorient_T1w", - "T1w"], + ["desc-restore_T1w", "desc-preproc_T1w", "desc-reorient_T1w", "T1w"], ["desc-preproc_T1w", "desc-reorient_T1w", "T1w"], "space-T1w_desc-brain_mask", "T1w-template", @@ -2578,16 +2854,12 @@ def register_ANTs_EPI_to_template(wf, cfg, strat_pool, pipe_num, opt=None): "from-template_to-T1w_mode-image_xfm": {"Template": 
"T1w-template"}, }, ) -def overwrite_transform_anat_to_template(wf, cfg, strat_pool, pipe_num, - opt=None): - - xfm_prov = strat_pool.get_cpac_provenance( - 'from-T1w_to-template_mode-image_xfm') +def overwrite_transform_anat_to_template(wf, cfg, strat_pool, pipe_num, opt=None): + xfm_prov = strat_pool.get_cpac_provenance("from-T1w_to-template_mode-image_xfm") reg_tool = check_prov_for_regtool(xfm_prov) - if opt.lower() == 'fsl' and reg_tool.lower() == 'ants': - + if opt.lower() == "fsl" and reg_tool.lower() == "ants": # Apply head-to-head transforms on brain using ABCD-style registration # Convert ANTs warps to FSL warps to be consistent with the functional registration # Ref: https://github.com/DCAN-Labs/DCAN-HCP/blob/master/PostFreeSurfer/scripts/AtlasRegistrationToMNI152_ANTsbased.sh#L134-L172 @@ -2598,20 +2870,22 @@ def overwrite_transform_anat_to_template(wf, cfg, strat_pool, pipe_num, # -t ${WD}/xfms/T1w_to_MNI_1Rigid.mat \ # -t ${WD}/xfms/T1w_to_MNI_0DerivedInitialMovingTranslation.mat \ # -o [${WD}/xfms/ANTs_CombinedWarp.nii.gz,1] - ants_apply_warp_t1_to_template = pe.Node(interface=ants.ApplyTransforms(), - name=f'ANTS-ABCD_T1_to_template_{pipe_num}') + ants_apply_warp_t1_to_template = pe.Node( + interface=ants.ApplyTransforms(), + name=f"ANTS-ABCD_T1_to_template_{pipe_num}", + ) ants_apply_warp_t1_to_template.inputs.dimension = 3 ants_apply_warp_t1_to_template.inputs.print_out_composite_warp_file = True - ants_apply_warp_t1_to_template.inputs.output_image = 'ANTs_CombinedWarp.nii.gz' + ants_apply_warp_t1_to_template.inputs.output_image = "ANTs_CombinedWarp.nii.gz" - node, out = strat_pool.get_data(['desc-restore_T1w', 'desc-preproc_T1w']) - wf.connect(node, out, ants_apply_warp_t1_to_template, 'input_image') + node, out = strat_pool.get_data(["desc-restore_T1w", "desc-preproc_T1w"]) + wf.connect(node, out, ants_apply_warp_t1_to_template, "input_image") - node, out = strat_pool.get_data('T1w-template') - wf.connect(node, out, ants_apply_warp_t1_to_template, 'reference_image') + node, out = strat_pool.get_data("T1w-template") + wf.connect(node, out, ants_apply_warp_t1_to_template, "reference_image") - node, out = strat_pool.get_data('from-T1w_to-template_mode-image_xfm') - wf.connect(node, out, ants_apply_warp_t1_to_template, 'transforms') + node, out = strat_pool.get_data("from-T1w_to-template_mode-image_xfm") + wf.connect(node, out, ants_apply_warp_t1_to_template, "transforms") # antsApplyTransforms -d 3 -i ${T1wImage}.nii.gz -r ${Reference} \ # -t [${WD}/xfms/T1w_to_MNI_0DerivedInitialMovingTranslation.mat,1] \ @@ -2621,161 +2895,182 @@ def overwrite_transform_anat_to_template(wf, cfg, strat_pool, pipe_num, # -o [${WD}/xfms/ANTs_CombinedInvWarp.nii.gz,1] # T1wImage is ACPC aligned head - ants_apply_warp_template_to_t1 = pe.Node(interface=ants.ApplyTransforms(), - name=f'ANTS-ABCD_template_to_T1_{pipe_num}') + ants_apply_warp_template_to_t1 = pe.Node( + interface=ants.ApplyTransforms(), + name=f"ANTS-ABCD_template_to_T1_{pipe_num}", + ) ants_apply_warp_template_to_t1.inputs.dimension = 3 ants_apply_warp_template_to_t1.inputs.print_out_composite_warp_file = True - ants_apply_warp_template_to_t1.inputs.output_image = 'ANTs_CombinedInvWarp.nii.gz' + ants_apply_warp_template_to_t1.inputs.output_image = ( + "ANTs_CombinedInvWarp.nii.gz" + ) - node, out = strat_pool.get_data('desc-preproc_T1w') - wf.connect(node, out, ants_apply_warp_template_to_t1, 'input_image') + node, out = strat_pool.get_data("desc-preproc_T1w") + wf.connect(node, out, ants_apply_warp_template_to_t1, 
"input_image") - node, out = strat_pool.get_data('T1w-template') - wf.connect(node, out, ants_apply_warp_template_to_t1, 'reference_image') + node, out = strat_pool.get_data("T1w-template") + wf.connect(node, out, ants_apply_warp_template_to_t1, "reference_image") - node, out = strat_pool.get_data('from-template_to-T1w_mode-image_xfm') - wf.connect(node, out, ants_apply_warp_template_to_t1, 'transforms') + node, out = strat_pool.get_data("from-template_to-T1w_mode-image_xfm") + wf.connect(node, out, ants_apply_warp_template_to_t1, "transforms") # c4d -mcs ${WD}/xfms/ANTs_CombinedWarp.nii.gz -oo ${WD}/xfms/e1.nii.gz ${WD}/xfms/e2.nii.gz ${WD}/xfms/e3.nii.gz # -mcs: -multicomponent-split, -oo: -output-multiple - split_combined_warp = pe.Node(util.Function(input_names=['input', - 'output_name'], - output_names=['output1', - 'output2', - 'output3'], - function=run_c4d), - name=f'split_combined_warp_{pipe_num}') - split_combined_warp.inputs.output_name = 'e' - - wf.connect(ants_apply_warp_t1_to_template, 'output_image', - split_combined_warp, 'input') + split_combined_warp = pe.Node( + util.Function( + input_names=["input", "output_name"], + output_names=["output1", "output2", "output3"], + function=run_c4d, + ), + name=f"split_combined_warp_{pipe_num}", + ) + split_combined_warp.inputs.output_name = "e" + + wf.connect( + ants_apply_warp_t1_to_template, "output_image", split_combined_warp, "input" + ) # c4d -mcs ${WD}/xfms/ANTs_CombinedInvWarp.nii.gz -oo ${WD}/xfms/e1inv.nii.gz ${WD}/xfms/e2inv.nii.gz ${WD}/xfms/e3inv.nii.gz - split_combined_inv_warp = pe.Node(util.Function(input_names=['input', - 'output_name'], - output_names=['output1', - 'output2', - 'output3'], - function=run_c4d), - name=f'split_combined_inv_warp_{pipe_num}') - split_combined_inv_warp.inputs.output_name = 'einv' - - wf.connect(ants_apply_warp_template_to_t1, 'output_image', - split_combined_inv_warp, 'input') + split_combined_inv_warp = pe.Node( + util.Function( + input_names=["input", "output_name"], + output_names=["output1", "output2", "output3"], + function=run_c4d, + ), + name=f"split_combined_inv_warp_{pipe_num}", + ) + split_combined_inv_warp.inputs.output_name = "einv" + + wf.connect( + ants_apply_warp_template_to_t1, + "output_image", + split_combined_inv_warp, + "input", + ) # fslmaths ${WD}/xfms/e2.nii.gz -mul -1 ${WD}/xfms/e-2.nii.gz - change_e2_sign = pe.Node(interface=fsl.maths.MathsCommand(), - name=f'change_e2_sign_{pipe_num}') - change_e2_sign.inputs.args = '-mul -1' + change_e2_sign = pe.Node( + interface=fsl.maths.MathsCommand(), name=f"change_e2_sign_{pipe_num}" + ) + change_e2_sign.inputs.args = "-mul -1" - wf.connect(split_combined_warp, 'output2', - change_e2_sign, 'in_file') + wf.connect(split_combined_warp, "output2", change_e2_sign, "in_file") # fslmaths ${WD}/xfms/e2inv.nii.gz -mul -1 ${WD}/xfms/e-2inv.nii.gz - change_e2inv_sign = pe.Node(interface=fsl.maths.MathsCommand(), - name=f'change_e2inv_sign_{pipe_num}') - change_e2inv_sign.inputs.args = '-mul -1' + change_e2inv_sign = pe.Node( + interface=fsl.maths.MathsCommand(), name=f"change_e2inv_sign_{pipe_num}" + ) + change_e2inv_sign.inputs.args = "-mul -1" - wf.connect(split_combined_inv_warp, 'output2', - change_e2inv_sign, 'in_file') + wf.connect(split_combined_inv_warp, "output2", change_e2inv_sign, "in_file") # fslmerge -t ${OutputTransform} ${WD}/xfms/e1.nii.gz ${WD}/xfms/e-2.nii.gz ${WD}/xfms/e3.nii.gz - merge_xfms_to_list = pe.Node(util.Merge(3), - name=f'merge_t1_to_template_xfms_to_list_{pipe_num}') + merge_xfms_to_list = pe.Node( + 
util.Merge(3), name=f"merge_t1_to_template_xfms_to_list_{pipe_num}" + ) - wf.connect(split_combined_warp, 'output1', - merge_xfms_to_list, 'in1') - wf.connect(change_e2_sign, 'out_file', - merge_xfms_to_list, 'in2') - wf.connect(split_combined_warp, 'output3', - merge_xfms_to_list, 'in3') + wf.connect(split_combined_warp, "output1", merge_xfms_to_list, "in1") + wf.connect(change_e2_sign, "out_file", merge_xfms_to_list, "in2") + wf.connect(split_combined_warp, "output3", merge_xfms_to_list, "in3") - merge_xfms = pe.Node(interface=fslMerge(), - name=f'merge_t1_to_template_xfms_{pipe_num}') - merge_xfms.inputs.dimension = 't' + merge_xfms = pe.Node( + interface=fslMerge(), name=f"merge_t1_to_template_xfms_{pipe_num}" + ) + merge_xfms.inputs.dimension = "t" - wf.connect(merge_xfms_to_list, 'out', - merge_xfms, 'in_files') + wf.connect(merge_xfms_to_list, "out", merge_xfms, "in_files") # fslmerge -t ${OutputInvTransform} ${WD}/xfms/e1inv.nii.gz ${WD}/xfms/e-2inv.nii.gz ${WD}/xfms/e3inv.nii.gz - merge_inv_xfms_to_list = pe.Node(util.Merge(3), - name=f'merge_template_to_t1_xfms_to_list_{pipe_num}') + merge_inv_xfms_to_list = pe.Node( + util.Merge(3), name=f"merge_template_to_t1_xfms_to_list_{pipe_num}" + ) - wf.connect(split_combined_inv_warp, 'output1', - merge_inv_xfms_to_list, 'in1') - wf.connect(change_e2inv_sign, 'out_file', - merge_inv_xfms_to_list, 'in2') - wf.connect(split_combined_inv_warp, 'output3', - merge_inv_xfms_to_list, 'in3') + wf.connect(split_combined_inv_warp, "output1", merge_inv_xfms_to_list, "in1") + wf.connect(change_e2inv_sign, "out_file", merge_inv_xfms_to_list, "in2") + wf.connect(split_combined_inv_warp, "output3", merge_inv_xfms_to_list, "in3") - merge_inv_xfms = pe.Node(interface=fslMerge(), - name=f'merge_template_to_t1_xfms_{pipe_num}') - merge_inv_xfms.inputs.dimension = 't' + merge_inv_xfms = pe.Node( + interface=fslMerge(), name=f"merge_template_to_t1_xfms_{pipe_num}" + ) + merge_inv_xfms.inputs.dimension = "t" - wf.connect(merge_inv_xfms_to_list, 'out', - merge_inv_xfms, 'in_files') + wf.connect(merge_inv_xfms_to_list, "out", merge_inv_xfms, "in_files") # applywarp --rel --interp=spline -i ${T1wRestore} -r ${Reference} -w ${OutputTransform} -o ${OutputT1wImageRestore} - fsl_apply_warp_t1_to_template = pe.Node(interface=fsl.ApplyWarp(), - name=f'FSL-ABCD_T1_to_template_{pipe_num}') + fsl_apply_warp_t1_to_template = pe.Node( + interface=fsl.ApplyWarp(), name=f"FSL-ABCD_T1_to_template_{pipe_num}" + ) fsl_apply_warp_t1_to_template.inputs.relwarp = True - fsl_apply_warp_t1_to_template.inputs.interp = 'spline' + fsl_apply_warp_t1_to_template.inputs.interp = "spline" - node, out = strat_pool.get_data(['desc-restore_T1w', 'desc-preproc_T1w']) - wf.connect(node, out, fsl_apply_warp_t1_to_template, 'in_file') + node, out = strat_pool.get_data(["desc-restore_T1w", "desc-preproc_T1w"]) + wf.connect(node, out, fsl_apply_warp_t1_to_template, "in_file") - node, out = strat_pool.get_data('T1w-template') - wf.connect(node, out, fsl_apply_warp_t1_to_template, 'ref_file') + node, out = strat_pool.get_data("T1w-template") + wf.connect(node, out, fsl_apply_warp_t1_to_template, "ref_file") - wf.connect(merge_xfms, 'merged_file', - fsl_apply_warp_t1_to_template, 'field_file') + wf.connect( + merge_xfms, "merged_file", fsl_apply_warp_t1_to_template, "field_file" + ) # applywarp --rel --interp=nn -i ${T1wRestoreBrain} -r ${Reference} -w ${OutputTransform} -o ${OutputT1wImageRestoreBrain} - fsl_apply_warp_t1_brain_to_template = pe.Node(interface=fsl.ApplyWarp(), - 
name=f'FSL-ABCD_T1_brain_to_template_{pipe_num}') + fsl_apply_warp_t1_brain_to_template = pe.Node( + interface=fsl.ApplyWarp(), name=f"FSL-ABCD_T1_brain_to_template_{pipe_num}" + ) fsl_apply_warp_t1_brain_to_template.inputs.relwarp = True - fsl_apply_warp_t1_brain_to_template.inputs.interp = 'nn' + fsl_apply_warp_t1_brain_to_template.inputs.interp = "nn" # TODO connect T1wRestoreBrain, check T1wRestoreBrain quality - node, out = strat_pool.get_data('desc-preproc_T1w') - wf.connect(node, out, fsl_apply_warp_t1_brain_to_template, 'in_file') + node, out = strat_pool.get_data("desc-preproc_T1w") + wf.connect(node, out, fsl_apply_warp_t1_brain_to_template, "in_file") - node, out = strat_pool.get_data('T1w-template') - wf.connect(node, out, fsl_apply_warp_t1_brain_to_template, 'ref_file') + node, out = strat_pool.get_data("T1w-template") + wf.connect(node, out, fsl_apply_warp_t1_brain_to_template, "ref_file") - wf.connect(merge_xfms, 'merged_file', - fsl_apply_warp_t1_brain_to_template, 'field_file') + wf.connect( + merge_xfms, "merged_file", fsl_apply_warp_t1_brain_to_template, "field_file" + ) - fsl_apply_warp_t1_brain_mask_to_template = pe.Node(interface=fsl.ApplyWarp(), - name=f'FSL-ABCD_T1_brain_mask_to_template_{pipe_num}') + fsl_apply_warp_t1_brain_mask_to_template = pe.Node( + interface=fsl.ApplyWarp(), + name=f"FSL-ABCD_T1_brain_mask_to_template_{pipe_num}", + ) fsl_apply_warp_t1_brain_mask_to_template.inputs.relwarp = True - fsl_apply_warp_t1_brain_mask_to_template.inputs.interp = 'nn' + fsl_apply_warp_t1_brain_mask_to_template.inputs.interp = "nn" - node, out = strat_pool.get_data('space-T1w_desc-brain_mask') - wf.connect(node, out, fsl_apply_warp_t1_brain_mask_to_template, 'in_file') + node, out = strat_pool.get_data("space-T1w_desc-brain_mask") + wf.connect(node, out, fsl_apply_warp_t1_brain_mask_to_template, "in_file") - node, out = strat_pool.get_data('T1w-template') - wf.connect(node, out, fsl_apply_warp_t1_brain_mask_to_template, 'ref_file') + node, out = strat_pool.get_data("T1w-template") + wf.connect(node, out, fsl_apply_warp_t1_brain_mask_to_template, "ref_file") - wf.connect(merge_xfms, 'merged_file', - fsl_apply_warp_t1_brain_mask_to_template, 'field_file') + wf.connect( + merge_xfms, + "merged_file", + fsl_apply_warp_t1_brain_mask_to_template, + "field_file", + ) # fslmaths ${OutputT1wImageRestore} -mas ${OutputT1wImageRestoreBrain} ${OutputT1wImageRestoreBrain} - apply_mask = pe.Node(interface=fsl.maths.ApplyMask(), - name=f'get_t1_brain_{pipe_num}') + apply_mask = pe.Node( + interface=fsl.maths.ApplyMask(), name=f"get_t1_brain_{pipe_num}" + ) - wf.connect(fsl_apply_warp_t1_to_template, 'out_file', - apply_mask, 'in_file') + wf.connect(fsl_apply_warp_t1_to_template, "out_file", apply_mask, "in_file") - wf.connect(fsl_apply_warp_t1_brain_to_template, 'out_file', - apply_mask, 'mask_file') + wf.connect( + fsl_apply_warp_t1_brain_to_template, "out_file", apply_mask, "mask_file" + ) outputs = { - 'space-template_desc-preproc_T1w': (apply_mask, 'out_file'), - 'space-template_desc-head_T1w': (fsl_apply_warp_t1_to_template, 'out_file'), - 'space-template_desc-T1w_mask': (fsl_apply_warp_t1_brain_mask_to_template, 'out_file'), - 'from-T1w_to-template_mode-image_xfm': (merge_xfms, 'merged_file'), - 'from-template_to-T1w_mode-image_xfm': (merge_inv_xfms, 'merged_file') + "space-template_desc-preproc_T1w": (apply_mask, "out_file"), + "space-template_desc-head_T1w": (fsl_apply_warp_t1_to_template, "out_file"), + "space-template_desc-T1w_mask": ( + 
fsl_apply_warp_t1_brain_mask_to_template, + "out_file", + ), + "from-T1w_to-template_mode-image_xfm": (merge_xfms, "merged_file"), + "from-template_to-T1w_mode-image_xfm": (merge_inv_xfms, "merged_file"), } return (wf, outputs) @@ -2796,40 +3091,37 @@ def overwrite_transform_anat_to_template(wf, cfg, strat_pool, pipe_num, outputs=["sbref"], ) def coregistration_prep_vol(wf, cfg, strat_pool, pipe_num, opt=None): - - get_func_volume = pe.Node(interface=afni.Calc(), - name=f'get_func_volume_{pipe_num}') + get_func_volume = pe.Node(interface=afni.Calc(), name=f"get_func_volume_{pipe_num}") get_func_volume.inputs.set( - expr='a', - single_idx=cfg.registration_workflows['functional_registration']['coregistration'][ - 'func_input_prep']['Selected Functional Volume']['func_reg_input_volume'], - outputtype='NIFTI_GZ' + expr="a", + single_idx=cfg.registration_workflows["functional_registration"][ + "coregistration" + ]["func_input_prep"]["Selected Functional Volume"]["func_reg_input_volume"], + outputtype="NIFTI_GZ", ) - if not cfg.registration_workflows['functional_registration'][ - 'coregistration']['func_input_prep']['reg_with_skull']: + if not cfg.registration_workflows["functional_registration"]["coregistration"][ + "func_input_prep" + ]["reg_with_skull"]: node, out = strat_pool.get_data("desc-brain_bold") else: # TODO check which file is functional_skull_leaf # TODO add a function to choose brain or skull? node, out = strat_pool.get_data(["desc-motion_bold", "bold"]) - wf.connect(node, out, get_func_volume, 'in_file_a') + wf.connect(node, out, get_func_volume, "in_file_a") - coreg_input = (get_func_volume, 'out_file') + coreg_input = (get_func_volume, "out_file") - outputs = { - 'sbref': coreg_input - } + outputs = {"sbref": coreg_input} return (wf, outputs) @nodeblock( name="coregistration_prep_mean", - switch=[["functional_preproc", "run"], - ["functional_preproc", "coreg_prep", "run"]], + switch=[["functional_preproc", "run"], ["functional_preproc", "coreg_prep", "run"]], option_key=[ "registration_workflows", "functional_registration", @@ -2842,38 +3134,34 @@ def coregistration_prep_vol(wf, cfg, strat_pool, pipe_num, opt=None): outputs=["sbref"], ) def coregistration_prep_mean(wf, cfg, strat_pool, pipe_num, opt=None): - coreg_input = strat_pool.get_data("desc-mean_bold") # TODO add mean skull - if cfg.registration_workflows['functional_registration'][ - 'coregistration']['func_input_prep']['Mean Functional'][ - 'n4_correct_func']: + if cfg.registration_workflows["functional_registration"]["coregistration"][ + "func_input_prep" + ]["Mean Functional"]["n4_correct_func"]: n4_correct_func = pe.Node( - interface= - ants.N4BiasFieldCorrection(dimension=3, - copy_header=True, - bspline_fitting_distance=200), + interface=ants.N4BiasFieldCorrection( + dimension=3, copy_header=True, bspline_fitting_distance=200 + ), shrink_factor=2, - name=f'func_mean_n4_corrected_{pipe_num}') - n4_correct_func.inputs.args = '-r True' + name=f"func_mean_n4_corrected_{pipe_num}", + ) + n4_correct_func.inputs.args = "-r True" node, out = coreg_input - wf.connect(node, out, n4_correct_func, 'input_image') + wf.connect(node, out, n4_correct_func, "input_image") - coreg_input = (n4_correct_func, 'output_image') + coreg_input = (n4_correct_func, "output_image") - outputs = { - 'sbref': coreg_input - } + outputs = {"sbref": coreg_input} return (wf, outputs) @nodeblock( name="coregistration_prep_fmriprep", - switch=[["functional_preproc", "run"], - ["functional_preproc", "coreg_prep", "run"]], + 
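The N4 step above flattens receive-coil bias in the mean EPI so the subsequent FLIRT cost function is driven by anatomy rather than intensity gradients. With the settings configured on the node, the underlying call is roughly (paths illustrative; -r 1 mirrors args = '-r True'):

    N4BiasFieldCorrection -d 3 -i mean_bold.nii.gz \
        -s 2 -b [200] -r 1 -o mean_bold_n4.nii.gz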
switch=[["functional_preproc", "run"], ["functional_preproc", "coreg_prep", "run"]], option_key=[ "registration_workflows", "functional_registration", @@ -2886,12 +3174,9 @@ def coregistration_prep_mean(wf, cfg, strat_pool, pipe_num, opt=None): outputs=["sbref"], ) def coregistration_prep_fmriprep(wf, cfg, strat_pool, pipe_num, opt=None): - coreg_input = strat_pool.get_data("desc-ref_bold") - outputs = { - 'sbref': coreg_input - } + outputs = {"sbref": coreg_input} return (wf, outputs) @@ -2930,158 +3215,181 @@ def coregistration_prep_fmriprep(wf, cfg, strat_pool, pipe_num, opt=None): ) def coregistration(wf, cfg, strat_pool, pipe_num, opt=None): diff_complete = False - if strat_pool.check_rpool("despiked-fieldmap") and \ - strat_pool.check_rpool("fieldmap-mask"): + if strat_pool.check_rpool("despiked-fieldmap") and strat_pool.check_rpool( + "fieldmap-mask" + ): diff_complete = True - if strat_pool.check_rpool('T2w') and cfg.anatomical_preproc['run_t2']: + if strat_pool.check_rpool("T2w") and cfg.anatomical_preproc["run_t2"]: # monkey data - func_to_anat = create_register_func_to_anat_use_T2(cfg, - f'func_to_anat_FLIRT_' - f'{pipe_num}') + func_to_anat = create_register_func_to_anat_use_T2( + cfg, f"func_to_anat_FLIRT_" f"{pipe_num}" + ) # https://github.com/DCAN-Labs/dcan-macaque-pipeline/blob/master/fMRIVolume/GenericfMRIVolumeProcessingPipeline.sh#L177 # fslmaths "$fMRIFolder"/"$NameOffMRI"_mc -Tmean "$fMRIFolder"/"$ScoutName"_gdc - func_mc_mean = pe.Node(interface=afni_utils.TStat(), - name=f'func_motion_corrected_mean_{pipe_num}') + func_mc_mean = pe.Node( + interface=afni_utils.TStat(), name=f"func_motion_corrected_mean_{pipe_num}" + ) - func_mc_mean.inputs.options = '-mean' - func_mc_mean.inputs.outputtype = 'NIFTI_GZ' + func_mc_mean.inputs.options = "-mean" + func_mc_mean.inputs.outputtype = "NIFTI_GZ" node, out = strat_pool.get_data("desc-motion_bold") - wf.connect(node, out, func_mc_mean, 'in_file') + wf.connect(node, out, func_mc_mean, "in_file") - wf.connect(func_mc_mean, 'out_file', func_to_anat, 'inputspec.func') + wf.connect(func_mc_mean, "out_file", func_to_anat, "inputspec.func") - node, out = strat_pool.get_data('desc-preproc_T1w') - wf.connect(node, out, func_to_anat, 'inputspec.T1_brain') + node, out = strat_pool.get_data("desc-preproc_T1w") + wf.connect(node, out, func_to_anat, "inputspec.T1_brain") - node, out = strat_pool.get_data('desc-head_T2w') - wf.connect(node, out, func_to_anat, 'inputspec.T2_head') + node, out = strat_pool.get_data("desc-head_T2w") + wf.connect(node, out, func_to_anat, "inputspec.T2_head") - node, out = strat_pool.get_data('desc-preproc_T2w') - wf.connect(node, out, func_to_anat, 'inputspec.T2_brain') + node, out = strat_pool.get_data("desc-preproc_T2w") + wf.connect(node, out, func_to_anat, "inputspec.T2_brain") else: # if field map-based distortion correction is on, but BBR is off, # send in the distortion correction files here - func_to_anat = create_register_func_to_anat(cfg, diff_complete, - f'func_to_anat_FLIRT_' - f'{pipe_num}') + func_to_anat = create_register_func_to_anat( + cfg, diff_complete, f"func_to_anat_FLIRT_" f"{pipe_num}" + ) func_to_anat.inputs.inputspec.dof = cfg.registration_workflows[ - 'functional_registration']['coregistration']['dof'] + "functional_registration" + ]["coregistration"]["dof"] func_to_anat.inputs.inputspec.interp = cfg.registration_workflows[ - 'functional_registration']['coregistration']['interpolation'] + "functional_registration" + ]["coregistration"]["interpolation"] - node, out = 
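The TStat node reproduces the fslmaths -Tmean step from the cited DCAN macaque script with AFNI instead of FSL; its shell equivalent is simply

    3dTstat -mean -prefix func_motion_corrected_mean.nii.gz func_mc.nii.gz

(prefix and input names illustrative).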
strat_pool.get_data('sbref') - wf.connect(node, out, func_to_anat, 'inputspec.func') + node, out = strat_pool.get_data("sbref") + wf.connect(node, out, func_to_anat, "inputspec.func") - node, out = strat_pool.get_data(['desc-restore-brain_T1w', 'desc-preproc_T1w']) - wf.connect(node, out, func_to_anat, 'inputspec.anat') + node, out = strat_pool.get_data(["desc-restore-brain_T1w", "desc-preproc_T1w"]) + wf.connect(node, out, func_to_anat, "inputspec.anat") if diff_complete: - node, out = strat_pool.get_data('effectiveEchoSpacing') - wf.connect(node, out, func_to_anat, 'echospacing_input.echospacing') + node, out = strat_pool.get_data("effectiveEchoSpacing") + wf.connect(node, out, func_to_anat, "echospacing_input.echospacing") - node, out = strat_pool.get_data('pe-direction') - wf.connect(node, out, func_to_anat, 'pedir_input.pedir') + node, out = strat_pool.get_data("pe-direction") + wf.connect(node, out, func_to_anat, "pedir_input.pedir") node, out = strat_pool.get_data("despiked-fieldmap") - wf.connect(node, out, func_to_anat, 'inputspec.fieldmap') + wf.connect(node, out, func_to_anat, "inputspec.fieldmap") node, out = strat_pool.get_data("fieldmap-mask") - wf.connect(node, out, func_to_anat, 'inputspec.fieldmapmask') + wf.connect(node, out, func_to_anat, "inputspec.fieldmapmask") - if strat_pool.check_rpool('T2w') and cfg.anatomical_preproc['run_t2']: + if strat_pool.check_rpool("T2w") and cfg.anatomical_preproc["run_t2"]: outputs = { - 'space-T1w_sbref': - (func_to_anat, 'outputspec.anat_func_nobbreg'), - 'from-bold_to-T1w_mode-image_desc-linear_xfm': - (func_to_anat, 'outputspec.func_to_anat_linear_xfm_nobbreg'), - 'from-bold_to-T1w_mode-image_desc-linear_warp': - (func_to_anat, 'outputspec.func_to_anat_linear_warp_nobbreg') + "space-T1w_sbref": (func_to_anat, "outputspec.anat_func_nobbreg"), + "from-bold_to-T1w_mode-image_desc-linear_xfm": ( + func_to_anat, + "outputspec.func_to_anat_linear_xfm_nobbreg", + ), + "from-bold_to-T1w_mode-image_desc-linear_warp": ( + func_to_anat, + "outputspec.func_to_anat_linear_warp_nobbreg", + ), } else: outputs = { - 'space-T1w_sbref': - (func_to_anat, 'outputspec.anat_func_nobbreg'), - 'from-bold_to-T1w_mode-image_desc-linear_xfm': - (func_to_anat, 'outputspec.func_to_anat_linear_xfm_nobbreg') + "space-T1w_sbref": (func_to_anat, "outputspec.anat_func_nobbreg"), + "from-bold_to-T1w_mode-image_desc-linear_xfm": ( + func_to_anat, + "outputspec.func_to_anat_linear_xfm_nobbreg", + ), } - if True in cfg.registration_workflows['functional_registration'][ - 'coregistration']["boundary_based_registration"]["run"]: - - func_to_anat_bbreg = create_bbregister_func_to_anat(diff_complete, - f'func_to_anat_' - f'bbreg_' - f'{pipe_num}') - func_to_anat_bbreg.inputs.inputspec.bbr_schedule = \ - cfg.registration_workflows['functional_registration'][ - 'coregistration']['boundary_based_registration'][ - 'bbr_schedule'] - - func_to_anat_bbreg.inputs.inputspec.bbr_wm_mask_args = \ - cfg.registration_workflows['functional_registration'][ - 'coregistration']['boundary_based_registration'][ - 'bbr_wm_mask_args'] - - node, out = strat_pool.get_data('sbref') - wf.connect(node, out, func_to_anat_bbreg, 'inputspec.func') - - if cfg.registration_workflows['functional_registration'][ - 'coregistration']['boundary_based_registration'][ - 'reference'] == 'whole-head': - node, out = strat_pool.get_data('desc-head_T1w') - wf.connect(node, out, func_to_anat_bbreg, 'inputspec.anat') - - elif cfg.registration_workflows['functional_registration'][ - 
'coregistration']['boundary_based_registration'][ - 'reference'] == 'brain': - node, out = strat_pool.get_data('desc-preproc_T1w') - wf.connect(node, out, func_to_anat_bbreg, 'inputspec.anat') - - wf.connect(func_to_anat, 'outputspec.func_to_anat_linear_xfm_nobbreg', - func_to_anat_bbreg, 'inputspec.linear_reg_matrix') - - if strat_pool.check_rpool('space-bold_label-WM_mask'): + if ( + True + in cfg.registration_workflows["functional_registration"]["coregistration"][ + "boundary_based_registration" + ]["run"] + ): + func_to_anat_bbreg = create_bbregister_func_to_anat( + diff_complete, f"func_to_anat_" f"bbreg_" f"{pipe_num}" + ) + func_to_anat_bbreg.inputs.inputspec.bbr_schedule = cfg.registration_workflows[ + "functional_registration" + ]["coregistration"]["boundary_based_registration"]["bbr_schedule"] + + func_to_anat_bbreg.inputs.inputspec.bbr_wm_mask_args = ( + cfg.registration_workflows["functional_registration"]["coregistration"][ + "boundary_based_registration" + ]["bbr_wm_mask_args"] + ) + + node, out = strat_pool.get_data("sbref") + wf.connect(node, out, func_to_anat_bbreg, "inputspec.func") + + if ( + cfg.registration_workflows["functional_registration"]["coregistration"][ + "boundary_based_registration" + ]["reference"] + == "whole-head" + ): + node, out = strat_pool.get_data("desc-head_T1w") + wf.connect(node, out, func_to_anat_bbreg, "inputspec.anat") + + elif ( + cfg.registration_workflows["functional_registration"]["coregistration"][ + "boundary_based_registration" + ]["reference"] + == "brain" + ): + node, out = strat_pool.get_data("desc-preproc_T1w") + wf.connect(node, out, func_to_anat_bbreg, "inputspec.anat") + + wf.connect( + func_to_anat, + "outputspec.func_to_anat_linear_xfm_nobbreg", + func_to_anat_bbreg, + "inputspec.linear_reg_matrix", + ) + + if strat_pool.check_rpool("space-bold_label-WM_mask"): node, out = strat_pool.get_data(["space-bold_label-WM_mask"]) - wf.connect(node, out, - func_to_anat_bbreg, 'inputspec.anat_wm_segmentation') + wf.connect(node, out, func_to_anat_bbreg, "inputspec.anat_wm_segmentation") else: - if cfg.registration_workflows['functional_registration'][ - 'coregistration']['boundary_based_registration']['bbr_wm_map'] == 'probability_map': - node, out = strat_pool.get_data(["label-WM_probseg", - "label-WM_mask"]) - elif cfg.registration_workflows['functional_registration'][ - 'coregistration']['boundary_based_registration']['bbr_wm_map'] == 'partial_volume_map': - node, out = strat_pool.get_data(["label-WM_pveseg", - "label-WM_mask"]) - wf.connect(node, out, - func_to_anat_bbreg, 'inputspec.anat_wm_segmentation') + if ( + cfg.registration_workflows["functional_registration"]["coregistration"][ + "boundary_based_registration" + ]["bbr_wm_map"] + == "probability_map" + ): + node, out = strat_pool.get_data(["label-WM_probseg", "label-WM_mask"]) + elif ( + cfg.registration_workflows["functional_registration"]["coregistration"][ + "boundary_based_registration" + ]["bbr_wm_map"] + == "partial_volume_map" + ): + node, out = strat_pool.get_data(["label-WM_pveseg", "label-WM_mask"]) + wf.connect(node, out, func_to_anat_bbreg, "inputspec.anat_wm_segmentation") if diff_complete: - node, out = strat_pool.get_data('effectiveEchoSpacing') - wf.connect(node, out, - func_to_anat_bbreg, 'echospacing_input.echospacing') + node, out = strat_pool.get_data("effectiveEchoSpacing") + wf.connect(node, out, func_to_anat_bbreg, "echospacing_input.echospacing") - node, out = strat_pool.get_data('pe-direction') - wf.connect(node, out, func_to_anat_bbreg, 
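Boundary-based registration refines the initial FLIRT matrix by aligning the EPI to the white-matter boundary, which is why a WM segmentation must be wired in above. With this configuration the underlying FLIRT call looks roughly like the following; the schedule path shown is FSL's stock BBR schedule, and the other paths are illustrative:

    flirt -in sbref.nii.gz -ref T1w_head.nii.gz \
        -init func_to_anat_linear_xfm_nobbreg.mat \
        -cost bbr -wmseg label-WM_mask.nii.gz \
        -schedule ${FSLDIR}/etc/flirtsch/bbr.sch \
        -omat func_to_anat_bbr.mat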
'pedir_input.pedir') + node, out = strat_pool.get_data("pe-direction") + wf.connect(node, out, func_to_anat_bbreg, "pedir_input.pedir") node, out = strat_pool.get_data("despiked-fieldmap") - wf.connect(node, out, func_to_anat_bbreg, 'inputspec.fieldmap') + wf.connect(node, out, func_to_anat_bbreg, "inputspec.fieldmap") node, out = strat_pool.get_data("fieldmap-mask") - wf.connect(node, out, - func_to_anat_bbreg, 'inputspec.fieldmapmask') + wf.connect(node, out, func_to_anat_bbreg, "inputspec.fieldmapmask") outputs = { - 'space-T1w_sbref': - (func_to_anat_bbreg, 'outputspec.anat_func'), - 'from-bold_to-T1w_mode-image_desc-linear_xfm': - (func_to_anat_bbreg, 'outputspec.func_to_anat_linear_xfm') + "space-T1w_sbref": (func_to_anat_bbreg, "outputspec.anat_func"), + "from-bold_to-T1w_mode-image_desc-linear_xfm": ( + func_to_anat_bbreg, + "outputspec.func_to_anat_linear_xfm", + ), } return (wf, outputs) @@ -3121,51 +3429,48 @@ def coregistration(wf, cfg, strat_pool, pipe_num, opt=None): }, ) def create_func_to_T1template_xfm(wf, cfg, strat_pool, pipe_num, opt=None): - '''Condense the BOLD-to-T1 coregistration transform and the T1-to-template + """Condense the BOLD-to-T1 coregistration transform and the T1-to-template transform into one transform matrix. - ''' - xfm_prov = strat_pool.get_cpac_provenance( - 'from-T1w_to-template_mode-image_xfm') + """ + xfm_prov = strat_pool.get_cpac_provenance("from-T1w_to-template_mode-image_xfm") reg_tool = check_prov_for_regtool(xfm_prov) - xfm, outputs = bold_to_T1template_xfm_connector('create_func_to_T1w' - f'template_xfm_{pipe_num}', - cfg, reg_tool, - symmetric=False) + xfm, outputs = bold_to_T1template_xfm_connector( + "create_func_to_T1w" f"template_xfm_{pipe_num}", cfg, reg_tool, symmetric=False + ) - node, out = strat_pool.get_data( - 'from-bold_to-T1w_mode-image_desc-linear_xfm') - wf.connect(node, out, xfm, 'inputspec.coreg_xfm') + node, out = strat_pool.get_data("from-bold_to-T1w_mode-image_desc-linear_xfm") + wf.connect(node, out, xfm, "inputspec.coreg_xfm") - node, out = strat_pool.get_data('desc-brain_T1w') - wf.connect(node, out, xfm, 'inputspec.input_brain') + node, out = strat_pool.get_data("desc-brain_T1w") + wf.connect(node, out, xfm, "inputspec.input_brain") - node, out = strat_pool.get_data('sbref') - wf.connect(node, out, xfm, 'inputspec.mean_bold') + node, out = strat_pool.get_data("sbref") + wf.connect(node, out, xfm, "inputspec.mean_bold") - node, out = strat_pool.get_data('T1w-brain-template-funcreg') - wf.connect(node, out, xfm, 'inputspec.T1w-brain-template_funcreg') + node, out = strat_pool.get_data("T1w-brain-template-funcreg") + wf.connect(node, out, xfm, "inputspec.T1w-brain-template_funcreg") - node, out = strat_pool.get_data('from-T1w_to-template_mode-image_xfm') - wf.connect(node, out, xfm, 'inputspec.T1w_to_template_xfm') + node, out = strat_pool.get_data("from-T1w_to-template_mode-image_xfm") + wf.connect(node, out, xfm, "inputspec.T1w_to_template_xfm") # FNIRT pipelines don't have an inverse nonlinear warp, make optional - if strat_pool.check_rpool('from-template_to-T1w_mode-image_xfm'): - node, out = strat_pool.get_data('from-template_to-T1w_mode-image_xfm') - wf.connect(node, out, xfm, 'inputspec.template_to_T1w_xfm') - - if strat_pool.check_rpool('ants-blip-warp'): - if reg_tool == 'ants': - node, out = strat_pool.get_data('ants-blip-warp') - wf.connect(node, out, xfm, 'inputspec.blip_warp') - elif reg_tool == 'fsl': + if strat_pool.check_rpool("from-template_to-T1w_mode-image_xfm"): + node, out = 
strat_pool.get_data("from-template_to-T1w_mode-image_xfm") + wf.connect(node, out, xfm, "inputspec.template_to_T1w_xfm") + + if strat_pool.check_rpool("ants-blip-warp"): + if reg_tool == "ants": + node, out = strat_pool.get_data("ants-blip-warp") + wf.connect(node, out, xfm, "inputspec.blip_warp") + elif reg_tool == "fsl": # apply the ants blip warp separately pass - elif strat_pool.check_rpool('fsl-blip-warp'): - if reg_tool == 'fsl': - node, out = strat_pool.get_data('fsl-blip-warp') - wf.connect(node, out, xfm, 'inputspec.blip_warp') - elif reg_tool == 'ants': + elif strat_pool.check_rpool("fsl-blip-warp"): + if reg_tool == "fsl": + node, out = strat_pool.get_data("fsl-blip-warp") + wf.connect(node, out, xfm, "inputspec.blip_warp") + elif reg_tool == "ants": # apply the fsl blip warp separately pass @@ -3200,42 +3505,39 @@ def create_func_to_T1template_xfm(wf, cfg, strat_pool, pipe_num, opt=None): }, }, ) -def create_func_to_T1template_symmetric_xfm(wf, cfg, strat_pool, pipe_num, - opt=None): - '''Condense the BOLD-to-T1 coregistration transform and the T1-to- +def create_func_to_T1template_symmetric_xfm(wf, cfg, strat_pool, pipe_num, opt=None): + """Condense the BOLD-to-T1 coregistration transform and the T1-to- symmetric-template transform into one transform matrix. - ''' - - xfm_prov = strat_pool.get_cpac_provenance( - 'from-T1w_to-symtemplate_mode-image_xfm') + """ + xfm_prov = strat_pool.get_cpac_provenance("from-T1w_to-symtemplate_mode-image_xfm") reg_tool = check_prov_for_regtool(xfm_prov) - xfm, outputs = bold_to_T1template_xfm_connector('create_func_to_T1wsymtem' - f'plate_xfm_{pipe_num}', - cfg, reg_tool, - symmetric=True) + xfm, outputs = bold_to_T1template_xfm_connector( + "create_func_to_T1wsymtem" f"plate_xfm_{pipe_num}", + cfg, + reg_tool, + symmetric=True, + ) - node, out = strat_pool.get_data( - 'from-bold_to-T1w_mode-image_desc-linear_xfm') - wf.connect(node, out, xfm, 'inputspec.coreg_xfm') + node, out = strat_pool.get_data("from-bold_to-T1w_mode-image_desc-linear_xfm") + wf.connect(node, out, xfm, "inputspec.coreg_xfm") - node, out = strat_pool.get_data('desc-brain_T1w') - wf.connect(node, out, xfm, 'inputspec.input_brain') + node, out = strat_pool.get_data("desc-brain_T1w") + wf.connect(node, out, xfm, "inputspec.input_brain") - node, out = strat_pool.get_data('sbref') - wf.connect(node, out, xfm, 'inputspec.mean_bold') + node, out = strat_pool.get_data("sbref") + wf.connect(node, out, xfm, "inputspec.mean_bold") - node, out = strat_pool.get_data('T1w-brain-template-symmetric-deriv') - wf.connect(node, out, xfm, 'inputspec.T1w-brain-template_funcreg') + node, out = strat_pool.get_data("T1w-brain-template-symmetric-deriv") + wf.connect(node, out, xfm, "inputspec.T1w-brain-template_funcreg") - node, out = strat_pool.get_data('from-T1w_to-symtemplate_mode-image_xfm') - wf.connect(node, out, xfm, 'inputspec.T1w_to_template_xfm') + node, out = strat_pool.get_data("from-T1w_to-symtemplate_mode-image_xfm") + wf.connect(node, out, xfm, "inputspec.T1w_to_template_xfm") # FNIRT pipelines don't have an inverse nonlinear warp, make optional - if strat_pool.check_rpool('from-symtemplate_to-T1w_mode-image_xfm'): - node, out = \ - strat_pool.get_data('from-symtemplate_to-T1w_mode-image_xfm') - wf.connect(node, out, xfm, 'inputspec.template_to_T1w_xfm') + if strat_pool.check_rpool("from-symtemplate_to-T1w_mode-image_xfm"): + node, out = strat_pool.get_data("from-symtemplate_to-T1w_mode-image_xfm") + wf.connect(node, out, xfm, "inputspec.template_to_T1w_xfm") return (wf, outputs) @@ 
-3273,109 +3575,107 @@ def create_func_to_T1template_symmetric_xfm(wf, cfg, strat_pool, pipe_num, ], outputs=["sbref", "desc-preproc_bold", "desc-stc_bold", "bold"], ) -def apply_phasediff_to_timeseries_separately(wf, cfg, strat_pool, pipe_num, - opt=None): - - outputs = {'desc-preproc_bold': strat_pool.get_data("desc-preproc_bold")} +def apply_phasediff_to_timeseries_separately(wf, cfg, strat_pool, pipe_num, opt=None): + outputs = {"desc-preproc_bold": strat_pool.get_data("desc-preproc_bold")} if not strat_pool.check_rpool("despiked-fieldmap"): return (wf, outputs) - invert_coreg_xfm = pe.Node(interface=fsl.ConvertXFM(), - name=f'invert_coreg_xfm_{pipe_num}') + invert_coreg_xfm = pe.Node( + interface=fsl.ConvertXFM(), name=f"invert_coreg_xfm_{pipe_num}" + ) invert_coreg_xfm.inputs.invert_xfm = True node, out = strat_pool.get_data("from-bold_to-T1w_mode-image_desc-linear_xfm") - wf.connect(node, out, invert_coreg_xfm, 'in_file') + wf.connect(node, out, invert_coreg_xfm, "in_file") - warp_fmap = pe.Node(interface=fsl.ApplyWarp(), - name=f'warp_fmap_{pipe_num}') + warp_fmap = pe.Node(interface=fsl.ApplyWarp(), name=f"warp_fmap_{pipe_num}") - node, out = strat_pool.get_data('despiked-fieldmap') - wf.connect(node, out, warp_fmap, 'in_file') + node, out = strat_pool.get_data("despiked-fieldmap") + wf.connect(node, out, warp_fmap, "in_file") - node, out = strat_pool.get_data('sbref') - wf.connect(node, out, warp_fmap, 'ref_file') + node, out = strat_pool.get_data("sbref") + wf.connect(node, out, warp_fmap, "ref_file") - wf.connect(invert_coreg_xfm, 'out_file', warp_fmap, 'premat') + wf.connect(invert_coreg_xfm, "out_file", warp_fmap, "premat") - mask_fmap = pe.Node(interface=fsl.maths.MathsCommand(), - name=f'mask_fmap_{pipe_num}') - mask_fmap.inputs.args = '-abs -bin' + mask_fmap = pe.Node( + interface=fsl.maths.MathsCommand(), name=f"mask_fmap_{pipe_num}" + ) + mask_fmap.inputs.args = "-abs -bin" - wf.connect(warp_fmap, 'out_file', mask_fmap, 'in_file') + wf.connect(warp_fmap, "out_file", mask_fmap, "in_file") - conv_pedir = \ - pe.Node(interface=util.Function(input_names=['pedir', - 'convert'], - output_names=['pedir'], - function=convert_pedir), - name=f'apply_phasediff_convert_pedir_{pipe_num}') - conv_pedir.inputs.convert = 'ijk_to_xyz' + conv_pedir = pe.Node( + interface=util.Function( + input_names=["pedir", "convert"], + output_names=["pedir"], + function=convert_pedir, + ), + name=f"apply_phasediff_convert_pedir_{pipe_num}", + ) + conv_pedir.inputs.convert = "ijk_to_xyz" - node, out = strat_pool.get_data('pe-direction') - wf.connect(node, out, conv_pedir, 'pedir') + node, out = strat_pool.get_data("pe-direction") + wf.connect(node, out, conv_pedir, "pedir") - fugue_saveshift = pe.Node(interface=fsl.FUGUE(), - name=f'fugue_saveshift_{pipe_num}') + fugue_saveshift = pe.Node(interface=fsl.FUGUE(), name=f"fugue_saveshift_{pipe_num}") fugue_saveshift.inputs.save_shift = True - wf.connect(warp_fmap, 'out_file', fugue_saveshift, 'fmap_in_file') - wf.connect(mask_fmap, 'out_file', fugue_saveshift, 'mask_file') + wf.connect(warp_fmap, "out_file", fugue_saveshift, "fmap_in_file") + wf.connect(mask_fmap, "out_file", fugue_saveshift, "mask_file") # FSL calls effective echo spacing = dwell time (not accurate) - node, out = strat_pool.get_data('effectiveEchoSpacing') - wf.connect(node, out, fugue_saveshift, 'dwell_time') + node, out = strat_pool.get_data("effectiveEchoSpacing") + wf.connect(node, out, fugue_saveshift, "dwell_time") - wf.connect(conv_pedir, 'pedir', fugue_saveshift, 
'unwarp_direction') + wf.connect(conv_pedir, "pedir", fugue_saveshift, "unwarp_direction") - shift_warp = pe.Node(interface=fsl.ConvertWarp(), - name=f'shift_warp_{pipe_num}') + shift_warp = pe.Node(interface=fsl.ConvertWarp(), name=f"shift_warp_{pipe_num}") shift_warp.inputs.out_relwarp = True - wf.connect(fugue_saveshift, 'shift_out_file', shift_warp, 'shift_in_file') + wf.connect(fugue_saveshift, "shift_out_file", shift_warp, "shift_in_file") - node, out = strat_pool.get_data('sbref') - wf.connect(node, out, shift_warp, 'reference') + node, out = strat_pool.get_data("sbref") + wf.connect(node, out, shift_warp, "reference") - wf.connect(conv_pedir, 'pedir', shift_warp, 'shift_direction') + wf.connect(conv_pedir, "pedir", shift_warp, "shift_direction") - warp_bold = pe.Node(interface=fsl.ApplyWarp(), - name=f'warp_bold_phasediff_{pipe_num}') + warp_bold = pe.Node( + interface=fsl.ApplyWarp(), name=f"warp_bold_phasediff_{pipe_num}" + ) warp_bold.inputs.relwarp = True - warp_bold.inputs.interp = 'spline' - - if opt == 'default': - node, out = strat_pool.get_data('desc-preproc_bold') - out_label = 'desc-preproc_bold' - elif opt == 'single_step_resampling_from_stc': - node, out = strat_pool.get_data('desc-stc_bold') - out_label = 'desc-stc_bold' - elif opt == 'abcd': - node, out = strat_pool.get_data('bold') - out_label = 'bold' - - wf.connect(node, out, warp_bold, 'in_file') - - node, out = strat_pool.get_data('sbref') - wf.connect(node, out, warp_bold, 'ref_file') - - wf.connect(shift_warp, 'out_file', warp_bold, 'field_file') - - warp_sbref = pe.Node(interface=fsl.ApplyWarp(), - name=f'warp_sbref_phasediff_{pipe_num}') + warp_bold.inputs.interp = "spline" + + if opt == "default": + node, out = strat_pool.get_data("desc-preproc_bold") + out_label = "desc-preproc_bold" + elif opt == "single_step_resampling_from_stc": + node, out = strat_pool.get_data("desc-stc_bold") + out_label = "desc-stc_bold" + elif opt == "abcd": + node, out = strat_pool.get_data("bold") + out_label = "bold" + + wf.connect(node, out, warp_bold, "in_file") + + node, out = strat_pool.get_data("sbref") + wf.connect(node, out, warp_bold, "ref_file") + + wf.connect(shift_warp, "out_file", warp_bold, "field_file") + + warp_sbref = pe.Node( + interface=fsl.ApplyWarp(), name=f"warp_sbref_phasediff_{pipe_num}" + ) warp_sbref.inputs.relwarp = True - warp_sbref.inputs.interp = 'spline' + warp_sbref.inputs.interp = "spline" - node, out = strat_pool.get_data('sbref') - wf.connect(node, out, warp_sbref, 'in_file') - wf.connect(node, out, warp_sbref, 'ref_file') + node, out = strat_pool.get_data("sbref") + wf.connect(node, out, warp_sbref, "in_file") + wf.connect(node, out, warp_sbref, "ref_file") - wf.connect(shift_warp, 'out_file', warp_sbref, 'field_file') + wf.connect(shift_warp, "out_file", warp_sbref, "field_file") - outputs = { - out_label: (warp_bold, 'out_file'), - 'sbref': (warp_sbref, 'out_file') - } + outputs = {out_label: (warp_bold, "out_file"), "sbref": (warp_sbref, "out_file")} return (wf, outputs) @@ -3412,69 +3712,67 @@ def apply_phasediff_to_timeseries_separately(wf, cfg, strat_pool, pipe_num, ], outputs=["desc-preproc_bold", "desc-stc_bold", "bold"], ) -def apply_blip_to_timeseries_separately(wf, cfg, strat_pool, pipe_num, - opt=None): - - xfm_prov = strat_pool.get_cpac_provenance( - 'from-bold_to-template_mode-image_xfm') +def apply_blip_to_timeseries_separately(wf, cfg, strat_pool, pipe_num, opt=None): + xfm_prov = strat_pool.get_cpac_provenance("from-bold_to-template_mode-image_xfm") reg_tool = 
check_prov_for_regtool(xfm_prov) - outputs = {'desc-preproc_bold': strat_pool.get_data("desc-preproc_bold")} + outputs = {"desc-preproc_bold": strat_pool.get_data("desc-preproc_bold")} if strat_pool.check_rpool("ants-blip-warp"): - if reg_tool == 'fsl': + if reg_tool == "fsl": blip_node, blip_out = strat_pool.get_data("ants-blip-warp") - reg_tool = 'ants' + reg_tool = "ants" else: return (wf, outputs) elif strat_pool.check_rpool("fsl-blip-warp"): - if reg_tool == 'ants': + if reg_tool == "ants": blip_node, blip_out = strat_pool.get_data("fsl-blip-warp") - reg_tool = 'fsl' + reg_tool = "fsl" else: return (wf, outputs) else: return (wf, outputs) - num_cpus = cfg.pipeline_setup['system_config'][ - 'max_cores_per_participant'] + num_cpus = cfg.pipeline_setup["system_config"]["max_cores_per_participant"] - num_ants_cores = cfg.pipeline_setup['system_config']['num_ants_threads'] + num_ants_cores = cfg.pipeline_setup["system_config"]["num_ants_threads"] - apply_xfm = apply_transform(f'warp_ts_to_blip_sep_{pipe_num}', reg_tool, - time_series=True, num_cpus=num_cpus, - num_ants_cores=num_ants_cores) + apply_xfm = apply_transform( + f"warp_ts_to_blip_sep_{pipe_num}", + reg_tool, + time_series=True, + num_cpus=num_cpus, + num_ants_cores=num_ants_cores, + ) - if reg_tool == 'ants': + if reg_tool == "ants": apply_xfm.inputs.inputspec.interpolation = cfg.registration_workflows[ - 'functional_registration']['func_registration_to_template'][ - 'ANTs_pipelines']['interpolation'] - elif reg_tool == 'fsl': + "functional_registration" + ]["func_registration_to_template"]["ANTs_pipelines"]["interpolation"] + elif reg_tool == "fsl": apply_xfm.inputs.inputspec.interpolation = cfg.registration_workflows[ - 'functional_registration']['func_registration_to_template'][ - 'FNIRT_pipelines']['interpolation'] + "functional_registration" + ]["func_registration_to_template"]["FNIRT_pipelines"]["interpolation"] connect = strat_pool.get_data("desc-preproc_bold") - if opt == 'default': - node, out = strat_pool.get_data('desc-preproc_bold') - out_label = 'desc-preproc_bold' - elif opt == 'single_step_resampling_from_stc': - node, out = strat_pool.get_data('desc-stc_bold') - out_label = 'desc-stc_bold' - elif opt == 'abcd': - node, out = strat_pool.get_data('bold') - out_label = 'bold' + if opt == "default": + node, out = strat_pool.get_data("desc-preproc_bold") + out_label = "desc-preproc_bold" + elif opt == "single_step_resampling_from_stc": + node, out = strat_pool.get_data("desc-stc_bold") + out_label = "desc-stc_bold" + elif opt == "abcd": + node, out = strat_pool.get_data("bold") + out_label = "bold" - wf.connect(node, out, apply_xfm, 'inputspec.input_image') + wf.connect(node, out, apply_xfm, "inputspec.input_image") node, out = strat_pool.get_data("sbref") - wf.connect(node, out, apply_xfm, 'inputspec.reference') + wf.connect(node, out, apply_xfm, "inputspec.reference") - wf.connect(blip_node, blip_out, apply_xfm, 'inputspec.transform') + wf.connect(blip_node, blip_out, apply_xfm, "inputspec.transform") - outputs = { - out_label: (apply_xfm, 'outputspec.output_image') - } + outputs = {out_label: (apply_xfm, "outputspec.output_image")} return (wf, outputs) @@ -3494,42 +3792,41 @@ def apply_blip_to_timeseries_separately(wf, cfg, strat_pool, pipe_num, outputs={"space-template_desc-head_T1w": {"Template": "T1w-template"}}, ) def warp_wholeheadT1_to_template(wf, cfg, strat_pool, pipe_num, opt=None): - - xfm_prov = strat_pool.get_cpac_provenance( - 'from-T1w_to-template_mode-image_xfm') + xfm_prov = 
strat_pool.get_cpac_provenance("from-T1w_to-template_mode-image_xfm") reg_tool = check_prov_for_regtool(xfm_prov) - num_cpus = cfg.pipeline_setup['system_config'][ - 'max_cores_per_participant'] + num_cpus = cfg.pipeline_setup["system_config"]["max_cores_per_participant"] - num_ants_cores = cfg.pipeline_setup['system_config']['num_ants_threads'] + num_ants_cores = cfg.pipeline_setup["system_config"]["num_ants_threads"] - apply_xfm = apply_transform(f'warp_wholehead_T1w_to_T1template_{pipe_num}', - reg_tool, time_series=False, num_cpus=num_cpus, - num_ants_cores=num_ants_cores) + apply_xfm = apply_transform( + f"warp_wholehead_T1w_to_T1template_{pipe_num}", + reg_tool, + time_series=False, + num_cpus=num_cpus, + num_ants_cores=num_ants_cores, + ) - if reg_tool == 'ants': + if reg_tool == "ants": apply_xfm.inputs.inputspec.interpolation = cfg.registration_workflows[ - 'functional_registration']['func_registration_to_template'][ - 'ANTs_pipelines']['interpolation'] - elif reg_tool == 'fsl': + "functional_registration" + ]["func_registration_to_template"]["ANTs_pipelines"]["interpolation"] + elif reg_tool == "fsl": apply_xfm.inputs.inputspec.interpolation = cfg.registration_workflows[ - 'functional_registration']['func_registration_to_template'][ - 'FNIRT_pipelines']['interpolation'] + "functional_registration" + ]["func_registration_to_template"]["FNIRT_pipelines"]["interpolation"] connect = strat_pool.get_data("desc-head_T1w") node, out = connect - wf.connect(node, out, apply_xfm, 'inputspec.input_image') + wf.connect(node, out, apply_xfm, "inputspec.input_image") node, out = strat_pool.get_data("T1w-template") - wf.connect(node, out, apply_xfm, 'inputspec.reference') + wf.connect(node, out, apply_xfm, "inputspec.reference") node, out = strat_pool.get_data("from-T1w_to-template_mode-image_xfm") - wf.connect(node, out, apply_xfm, 'inputspec.transform') + wf.connect(node, out, apply_xfm, "inputspec.transform") - outputs = { - 'space-template_desc-head_T1w': (apply_xfm, 'outputspec.output_image') - } + outputs = {"space-template_desc-head_T1w": (apply_xfm, "outputspec.output_image")} return (wf, outputs) @@ -3548,22 +3845,23 @@ def warp_wholeheadT1_to_template(wf, cfg, strat_pool, pipe_num, opt=None): outputs={"space-template_desc-brain_mask": {"Template": "T1w-template"}}, ) def warp_T1mask_to_template(wf, cfg, strat_pool, pipe_num, opt=None): - - xfm_prov = strat_pool.get_cpac_provenance( - 'from-T1w_to-template_mode-image_xfm') + xfm_prov = strat_pool.get_cpac_provenance("from-T1w_to-template_mode-image_xfm") reg_tool = check_prov_for_regtool(xfm_prov) - num_cpus = cfg.pipeline_setup['system_config'][ - 'max_cores_per_participant'] + num_cpus = cfg.pipeline_setup["system_config"]["max_cores_per_participant"] - num_ants_cores = cfg.pipeline_setup['system_config']['num_ants_threads'] + num_ants_cores = cfg.pipeline_setup["system_config"]["num_ants_threads"] - apply_xfm = apply_transform(f'warp_T1mask_to_T1template_{pipe_num}', - reg_tool, time_series=False, num_cpus=num_cpus, - num_ants_cores=num_ants_cores) + apply_xfm = apply_transform( + f"warp_T1mask_to_T1template_{pipe_num}", + reg_tool, + time_series=False, + num_cpus=num_cpus, + num_ants_cores=num_ants_cores, + ) apply_xfm.inputs.inputspec.interpolation = "NearestNeighbor" - ''' + """ if reg_tool == 'ants': apply_xfm.inputs.inputspec.interpolation = cfg.registration_workflows[ 'functional_registration']['func_registration_to_template'][ @@ -3572,20 +3870,18 @@ def warp_T1mask_to_template(wf, cfg, strat_pool, pipe_num, opt=None): 
apply_xfm.inputs.inputspec.interpolation = cfg.registration_workflows[ 'functional_registration']['func_registration_to_template'][ 'FNIRT_pipelines']['interpolation'] - ''' + """ connect = strat_pool.get_data("space-T1w_desc-brain_mask") node, out = connect - wf.connect(node, out, apply_xfm, 'inputspec.input_image') + wf.connect(node, out, apply_xfm, "inputspec.input_image") node, out = strat_pool.get_data("T1w-template") - wf.connect(node, out, apply_xfm, 'inputspec.reference') + wf.connect(node, out, apply_xfm, "inputspec.reference") node, out = strat_pool.get_data("from-T1w_to-template_mode-image_xfm") - wf.connect(node, out, apply_xfm, 'inputspec.transform') + wf.connect(node, out, apply_xfm, "inputspec.transform") - outputs = { - 'space-template_desc-brain_mask': (apply_xfm, 'outputspec.output_image') - } + outputs = {"space-template_desc-brain_mask": (apply_xfm, "outputspec.output_image")} return (wf, outputs) @@ -3605,46 +3901,46 @@ def warp_T1mask_to_template(wf, cfg, strat_pool, pipe_num, opt=None): "T1w-brain-template-funcreg", ], outputs={ - "space-template_desc-preproc_bold": { - "Template": "T1w-brain-template-funcreg"} + "space-template_desc-preproc_bold": {"Template": "T1w-brain-template-funcreg"} }, ) def warp_timeseries_to_T1template(wf, cfg, strat_pool, pipe_num, opt=None): - - xfm_prov = strat_pool.get_cpac_provenance( - 'from-bold_to-template_mode-image_xfm') + xfm_prov = strat_pool.get_cpac_provenance("from-bold_to-template_mode-image_xfm") reg_tool = check_prov_for_regtool(xfm_prov) - num_cpus = cfg.pipeline_setup['system_config'][ - 'max_cores_per_participant'] + num_cpus = cfg.pipeline_setup["system_config"]["max_cores_per_participant"] - num_ants_cores = cfg.pipeline_setup['system_config']['num_ants_threads'] + num_ants_cores = cfg.pipeline_setup["system_config"]["num_ants_threads"] - apply_xfm = apply_transform(f'warp_ts_to_T1template_{pipe_num}', reg_tool, - time_series=True, num_cpus=num_cpus, - num_ants_cores=num_ants_cores) + apply_xfm = apply_transform( + f"warp_ts_to_T1template_{pipe_num}", + reg_tool, + time_series=True, + num_cpus=num_cpus, + num_ants_cores=num_ants_cores, + ) - if reg_tool == 'ants': + if reg_tool == "ants": apply_xfm.inputs.inputspec.interpolation = cfg.registration_workflows[ - 'functional_registration']['func_registration_to_template'][ - 'ANTs_pipelines']['interpolation'] - elif reg_tool == 'fsl': + "functional_registration" + ]["func_registration_to_template"]["ANTs_pipelines"]["interpolation"] + elif reg_tool == "fsl": apply_xfm.inputs.inputspec.interpolation = cfg.registration_workflows[ - 'functional_registration']['func_registration_to_template'][ - 'FNIRT_pipelines']['interpolation'] + "functional_registration" + ]["func_registration_to_template"]["FNIRT_pipelines"]["interpolation"] connect = strat_pool.get_data("desc-preproc_bold") node, out = connect - wf.connect(node, out, apply_xfm, 'inputspec.input_image') + wf.connect(node, out, apply_xfm, "inputspec.input_image") node, out = strat_pool.get_data("T1w-brain-template-funcreg") - wf.connect(node, out, apply_xfm, 'inputspec.reference') + wf.connect(node, out, apply_xfm, "inputspec.reference") node, out = strat_pool.get_data("from-bold_to-template_mode-image_xfm") - wf.connect(node, out, apply_xfm, 'inputspec.transform') + wf.connect(node, out, apply_xfm, "inputspec.transform") outputs = { - 'space-template_desc-preproc_bold': (apply_xfm, 'outputspec.output_image') + "space-template_desc-preproc_bold": (apply_xfm, "outputspec.output_image") } return (wf, outputs) @@ -3670,43 
+3966,46 @@ def warp_timeseries_to_T1template(wf, cfg, strat_pool, pipe_num, opt=None): } }, ) -def warp_timeseries_to_T1template_deriv(wf, cfg, strat_pool, pipe_num, - opt=None): - xfm_prov = strat_pool.get_cpac_provenance( - 'from-bold_to-template_mode-image_xfm') +def warp_timeseries_to_T1template_deriv(wf, cfg, strat_pool, pipe_num, opt=None): + xfm_prov = strat_pool.get_cpac_provenance("from-bold_to-template_mode-image_xfm") reg_tool = check_prov_for_regtool(xfm_prov) - num_cpus = cfg.pipeline_setup['system_config'][ - 'max_cores_per_participant'] + num_cpus = cfg.pipeline_setup["system_config"]["max_cores_per_participant"] - num_ants_cores = cfg.pipeline_setup['system_config']['num_ants_threads'] + num_ants_cores = cfg.pipeline_setup["system_config"]["num_ants_threads"] - apply_xfm = apply_transform(f'warp_ts_to_T1template_{pipe_num}', reg_tool, - time_series=True, num_cpus=num_cpus, - num_ants_cores=num_ants_cores) + apply_xfm = apply_transform( + f"warp_ts_to_T1template_{pipe_num}", + reg_tool, + time_series=True, + num_cpus=num_cpus, + num_ants_cores=num_ants_cores, + ) - if reg_tool == 'ants': + if reg_tool == "ants": apply_xfm.inputs.inputspec.interpolation = cfg.registration_workflows[ - 'functional_registration']['func_registration_to_template'][ - 'ANTs_pipelines']['interpolation'] - elif reg_tool == 'fsl': + "functional_registration" + ]["func_registration_to_template"]["ANTs_pipelines"]["interpolation"] + elif reg_tool == "fsl": apply_xfm.inputs.inputspec.interpolation = cfg.registration_workflows[ - 'functional_registration']['func_registration_to_template'][ - 'FNIRT_pipelines']['interpolation'] + "functional_registration" + ]["func_registration_to_template"]["FNIRT_pipelines"]["interpolation"] connect = strat_pool.get_data("desc-preproc_bold") node, out = connect - wf.connect(node, out, apply_xfm, 'inputspec.input_image') + wf.connect(node, out, apply_xfm, "inputspec.input_image") node, out = strat_pool.get_data("T1w-brain-template-deriv") - wf.connect(node, out, apply_xfm, 'inputspec.reference') + wf.connect(node, out, apply_xfm, "inputspec.reference") node, out = strat_pool.get_data("from-bold_to-template_mode-image_xfm") - wf.connect(node, out, apply_xfm, 'inputspec.transform') + wf.connect(node, out, apply_xfm, "inputspec.transform") outputs = { - 'space-template_res-derivative_desc-preproc_bold': - (apply_xfm, 'outputspec.output_image') + "space-template_res-derivative_desc-preproc_bold": ( + apply_xfm, + "outputspec.output_image", + ) } return (wf, outputs) @@ -3723,8 +4022,12 @@ def warp_timeseries_to_T1template_deriv(wf, cfg, strat_pool, pipe_num, option_key=["apply_transform", "using"], option_val="abcd", inputs=[ - ("desc-preproc_bold", "desc-reorient_bold", "motion-basefile", - "coordinate-transformation"), + ( + "desc-preproc_bold", + "desc-reorient_bold", + "motion-basefile", + "coordinate-transformation", + ), "from-T1w_to-template_mode-image_xfm", "from-bold_to-T1w_mode-image_desc-linear_xfm", "from-bold_to-template_mode-image_xfm", @@ -3738,281 +4041,348 @@ def warp_timeseries_to_T1template_deriv(wf, cfg, strat_pool, pipe_num, "space-template_desc-brain_mask", ], outputs={ - "space-template_desc-preproc_bold": { - "Template": "T1w-brain-template-funcreg"}, - "space-template_desc-scout_bold": { - "Template": "T1w-brain-template-funcreg"}, - "space-template_desc-head_bold": { - "Template": "T1w-brain-template-funcreg"}, + "space-template_desc-preproc_bold": {"Template": "T1w-brain-template-funcreg"}, + "space-template_desc-scout_bold": {"Template": 
"T1w-brain-template-funcreg"}, + "space-template_desc-head_bold": {"Template": "T1w-brain-template-funcreg"}, }, ) -def warp_timeseries_to_T1template_abcd(wf, cfg, strat_pool, pipe_num, opt=None - ): +def warp_timeseries_to_T1template_abcd(wf, cfg, strat_pool, pipe_num, opt=None): # Apply motion correction, coreg, anat-to-template transforms on raw functional timeseries using ABCD-style registration # Ref: https://github.com/DCAN-Labs/DCAN-HCP/blob/master/fMRIVolume/scripts/OneStepResampling.sh#L168-L197 # https://github.com/DCAN-Labs/DCAN-HCP/blob/master/fMRIVolume/scripts/DistortionCorrectionAndEPIToT1wReg_FLIRTBBRAndFreeSurferBBRbased.sh#L548 # convertwarp --relout --rel -m ${WD}/fMRI2str.mat --ref=${T1wImage} --out=${WD}/fMRI2str.nii.gz - convert_func_to_anat_linear_warp = pe.Node(interface=fsl.ConvertWarp(), - name=f'convert_func_to_anat_linear_warp_{pipe_num}') + convert_func_to_anat_linear_warp = pe.Node( + interface=fsl.ConvertWarp(), name=f"convert_func_to_anat_linear_warp_{pipe_num}" + ) convert_func_to_anat_linear_warp.inputs.out_relwarp = True convert_func_to_anat_linear_warp.inputs.relwarp = True - - node, out = strat_pool.get_data('desc-preproc_T1w') - wf.connect(node, out, convert_func_to_anat_linear_warp, 'reference') - - if strat_pool.check_rpool('fsl-blip-warp'): - node, out = strat_pool.get_data('from-bold_to-T1w_mode-image_desc-linear_xfm') - wf.connect(node, out, convert_func_to_anat_linear_warp, 'postmat') - - node, out = strat_pool.get_data('fsl-blip-warp') - wf.connect(node, out, convert_func_to_anat_linear_warp, 'warp1') + + node, out = strat_pool.get_data("desc-preproc_T1w") + wf.connect(node, out, convert_func_to_anat_linear_warp, "reference") + + if strat_pool.check_rpool("fsl-blip-warp"): + node, out = strat_pool.get_data("from-bold_to-T1w_mode-image_desc-linear_xfm") + wf.connect(node, out, convert_func_to_anat_linear_warp, "postmat") + + node, out = strat_pool.get_data("fsl-blip-warp") + wf.connect(node, out, convert_func_to_anat_linear_warp, "warp1") else: - node, out = strat_pool.get_data('from-bold_to-T1w_mode-image_desc-linear_xfm') - wf.connect(node, out, convert_func_to_anat_linear_warp, 'premat') + node, out = strat_pool.get_data("from-bold_to-T1w_mode-image_desc-linear_xfm") + wf.connect(node, out, convert_func_to_anat_linear_warp, "premat") # https://github.com/DCAN-Labs/DCAN-HCP/blob/master/fMRIVolume/scripts/OneStepResampling.sh#L140 # convertwarp --relout --rel --warp1=${fMRIToStructuralInput} --warp2=${StructuralToStandard} --ref=${WD}/${T1wImageFile}.${FinalfMRIResolution} --out=${OutputTransform} - convert_func_to_standard_warp = pe.Node(interface=fsl.ConvertWarp(), - name=f'convert_func_to_standard_warp_{pipe_num}') + convert_func_to_standard_warp = pe.Node( + interface=fsl.ConvertWarp(), name=f"convert_func_to_standard_warp_{pipe_num}" + ) convert_func_to_standard_warp.inputs.out_relwarp = True convert_func_to_standard_warp.inputs.relwarp = True - wf.connect(convert_func_to_anat_linear_warp, 'out_file', - convert_func_to_standard_warp, 'warp1') + wf.connect( + convert_func_to_anat_linear_warp, + "out_file", + convert_func_to_standard_warp, + "warp1", + ) - node, out = strat_pool.get_data('from-T1w_to-template_mode-image_xfm') - wf.connect(node, out, convert_func_to_standard_warp, 'warp2') + node, out = strat_pool.get_data("from-T1w_to-template_mode-image_xfm") + wf.connect(node, out, convert_func_to_standard_warp, "warp2") + from CPAC.func_preproc.func_preproc import ( + anat_brain_mask_to_bold_res, + anat_brain_to_bold_res, + ) - from 
CPAC.func_preproc.func_preproc import anat_brain_to_bold_res, anat_brain_mask_to_bold_res anat_brain_to_func_res = anat_brain_to_bold_res(wf, cfg, pipe_num) - node, out = strat_pool.get_data('space-template_desc-preproc_T1w') - wf.connect(node, out, anat_brain_to_func_res, 'inputspec.space-template_desc-preproc_T1w') + node, out = strat_pool.get_data("space-template_desc-preproc_T1w") + wf.connect( + node, out, anat_brain_to_func_res, "inputspec.space-template_desc-preproc_T1w" + ) - node, out = strat_pool.get_data('T1w-template-funcreg') - wf.connect(node, out, anat_brain_to_func_res, 'inputspec.T1w-template-funcreg') + node, out = strat_pool.get_data("T1w-template-funcreg") + wf.connect(node, out, anat_brain_to_func_res, "inputspec.T1w-template-funcreg") - wf.connect(anat_brain_to_func_res, 'outputspec.space-template_res-bold_desc-brain_T1w', convert_func_to_standard_warp, 'reference') + wf.connect( + anat_brain_to_func_res, + "outputspec.space-template_res-bold_desc-brain_T1w", + convert_func_to_standard_warp, + "reference", + ) # TODO add condition: if no gradient distortion # https://github.com/DCAN-Labs/DCAN-HCP/blob/master/fMRIVolume/GenericfMRIVolumeProcessingPipeline.sh#L283-L284 # fslroi "$fMRIFolder"/"$NameOffMRI"_gdc "$fMRIFolder"/"$NameOffMRI"_gdc_warp 0 3 - extract_func_roi = pe.Node(interface=fsl.ExtractROI(), - name=f'extract_func_roi_{pipe_num}') + extract_func_roi = pe.Node( + interface=fsl.ExtractROI(), name=f"extract_func_roi_{pipe_num}" + ) extract_func_roi.inputs.t_min = 0 extract_func_roi.inputs.t_size = 3 - node, out = strat_pool.get_data('desc-reorient_bold') - wf.connect(node, out, extract_func_roi, 'in_file') + node, out = strat_pool.get_data("desc-reorient_bold") + wf.connect(node, out, extract_func_roi, "in_file") # fslmaths "$fMRIFolder"/"$NameOffMRI"_gdc_warp -mul 0 "$fMRIFolder"/"$NameOffMRI"_gdc_warp - multiply_func_roi_by_zero = pe.Node(interface=fsl.maths.MathsCommand(), - name=f'multiply_func_roi_by_zero_{pipe_num}') + multiply_func_roi_by_zero = pe.Node( + interface=fsl.maths.MathsCommand(), name=f"multiply_func_roi_by_zero_{pipe_num}" + ) - multiply_func_roi_by_zero.inputs.args = '-mul 0' + multiply_func_roi_by_zero.inputs.args = "-mul 0" - wf.connect(extract_func_roi, 'roi_file', - multiply_func_roi_by_zero, 'in_file') + wf.connect(extract_func_roi, "roi_file", multiply_func_roi_by_zero, "in_file") # https://github.com/DCAN-Labs/DCAN-HCP/blob/master/fMRIVolume/scripts/OneStepResampling.sh#L168-L193 # fslsplit ${InputfMRI} ${WD}/prevols/vol -t - split_func = pe.Node(interface=fsl.Split(), - name=f'split_func_{pipe_num}') + split_func = pe.Node(interface=fsl.Split(), name=f"split_func_{pipe_num}") - split_func.inputs.dimension = 't' + split_func.inputs.dimension = "t" - node, out = strat_pool.get_data('desc-reorient_bold') - wf.connect(node, out, split_func, 'in_file') + node, out = strat_pool.get_data("desc-reorient_bold") + wf.connect(node, out, split_func, "in_file") ### Loop starts! 
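
The "loop" announced by the banner above is realized as MapNodes rather than a
literal Python loop: for every 3D volume of the BOLD series, the (zeroed-out)
gradient-distortion warp and that volume's motion matrix are composed with the
BOLD-to-standard warp via convertwarp, and the volume is then resampled exactly
once with a single spline applywarp. A minimal sketch of the same idea outside
C-PAC, using nipype's stock FSL interfaces and hypothetical node names (not the
patch itself):

    from nipype.interfaces import fsl
    from nipype.pipeline import engine as pe

    # fslsplit: one 3D file per time point.
    split_bold = pe.Node(fsl.Split(dimension="t"), name="split_bold")

    # convertwarp per volume: the motion matrix (postmat) and the
    # func-to-standard warp (warp2) collapse into one displacement field.
    compose_warp = pe.MapNode(
        fsl.ConvertWarp(relwarp=True, out_relwarp=True),
        iterfield=["postmat"],
        name="compose_warp",
    )

    # applywarp per volume: exactly one spline interpolation of the data.
    resample_vol = pe.MapNode(
        fsl.ApplyWarp(relwarp=True, interp="spline"),
        iterfield=["in_file", "field_file"],
        name="resample_vol",
    )

    wf_sketch = pe.Workflow(name="one_step_resampling_sketch")
    wf_sketch.connect(split_bold, "out_files", resample_vol, "in_file")
    wf_sketch.connect(compose_warp, "out_file", resample_vol, "field_file")
    # Still to be supplied upstream: split_bold.inputs.in_file (the BOLD
    # run), compose_warp.inputs.postmat (per-volume MCFLIRT matrices),
    # and the reference image and warp2 field on both MapNodes.

Composing the warps before a single ApplyWarp is what makes this "one-step":
resampling the series once avoids the cumulative blurring of applying motion
correction and spatial normalization as two separate interpolations.
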
### # convertwarp --relout --rel --ref=${WD}/prevols/vol${vnum}.nii.gz --warp1=${GradientDistortionField} --postmat=${MotionMatrixFolder}/${MotionMatrixPrefix}${vnum} --out=${MotionMatrixFolder}/${MotionMatrixPrefix}${vnum}_gdc_warp.nii.gz - convert_motion_distortion_warp = pe.MapNode(interface=fsl.ConvertWarp(), - name=f'convert_motion_distortion_warp_{pipe_num}', - iterfield=['reference', 'postmat']) + convert_motion_distortion_warp = pe.MapNode( + interface=fsl.ConvertWarp(), + name=f"convert_motion_distortion_warp_{pipe_num}", + iterfield=["reference", "postmat"], + ) convert_motion_distortion_warp.inputs.out_relwarp = True convert_motion_distortion_warp.inputs.relwarp = True - wf.connect(multiply_func_roi_by_zero, 'out_file', - convert_motion_distortion_warp, 'warp1') + wf.connect( + multiply_func_roi_by_zero, "out_file", convert_motion_distortion_warp, "warp1" + ) - wf.connect(split_func, 'out_files', - convert_motion_distortion_warp, 'reference') + wf.connect(split_func, "out_files", convert_motion_distortion_warp, "reference") - node, out = strat_pool.get_data('coordinate-transformation') - wf.connect(node, out, convert_motion_distortion_warp, 'postmat') + node, out = strat_pool.get_data("coordinate-transformation") + wf.connect(node, out, convert_motion_distortion_warp, "postmat") # convertwarp --relout --rel --ref=${WD}/${T1wImageFile}.${FinalfMRIResolution} --warp1=${MotionMatrixFolder}/${MotionMatrixPrefix}${vnum}_gdc_warp.nii.gz --warp2=${OutputTransform} --out=${MotionMatrixFolder}/${MotionMatrixPrefix}${vnum}_all_warp.nii.gz - convert_registration_warp = pe.MapNode(interface=fsl.ConvertWarp(), - name=f'convert_registration_warp_{pipe_num}', - iterfield=['warp1']) + convert_registration_warp = pe.MapNode( + interface=fsl.ConvertWarp(), + name=f"convert_registration_warp_{pipe_num}", + iterfield=["warp1"], + ) convert_registration_warp.inputs.out_relwarp = True convert_registration_warp.inputs.relwarp = True - wf.connect(anat_brain_to_func_res, 'outputspec.space-template_res-bold_desc-brain_T1w', convert_registration_warp, 'reference') + wf.connect( + anat_brain_to_func_res, + "outputspec.space-template_res-bold_desc-brain_T1w", + convert_registration_warp, + "reference", + ) - wf.connect(convert_motion_distortion_warp, 'out_file', - convert_registration_warp, 'warp1') + wf.connect( + convert_motion_distortion_warp, "out_file", convert_registration_warp, "warp1" + ) - wf.connect(convert_func_to_standard_warp, 'out_file', - convert_registration_warp, 'warp2') + wf.connect( + convert_func_to_standard_warp, "out_file", convert_registration_warp, "warp2" + ) # fslmaths ${WD}/prevols/vol${vnum}.nii.gz -mul 0 -add 1 ${WD}/prevols/vol${vnum}_mask.nii.gz - generate_vol_mask = pe.MapNode(interface=fsl.maths.MathsCommand(), - name=f'generate_mask_{pipe_num}', - iterfield=['in_file']) + generate_vol_mask = pe.MapNode( + interface=fsl.maths.MathsCommand(), + name=f"generate_mask_{pipe_num}", + iterfield=["in_file"], + ) - generate_vol_mask.inputs.args = '-mul 0 -add 1' + generate_vol_mask.inputs.args = "-mul 0 -add 1" - wf.connect(split_func, 'out_files', - generate_vol_mask, 'in_file') + wf.connect(split_func, "out_files", generate_vol_mask, "in_file") # applywarp --rel --interp=spline --in=${WD}/prevols/vol${vnum}.nii.gz --warp=${MotionMatrixFolder}/${MotionMatrixPrefix}${vnum}_all_warp.nii.gz --ref=${WD}/${T1wImageFile}.${FinalfMRIResolution} --out=${WD}/postvols/vol${vnum}.nii.gz - applywarp_func_to_standard = pe.MapNode(interface=fsl.ApplyWarp(), - 
name=f'applywarp_func_to_standard_{pipe_num}', - iterfield=['in_file', 'field_file']) + applywarp_func_to_standard = pe.MapNode( + interface=fsl.ApplyWarp(), + name=f"applywarp_func_to_standard_{pipe_num}", + iterfield=["in_file", "field_file"], + ) applywarp_func_to_standard.inputs.relwarp = True - applywarp_func_to_standard.inputs.interp = 'spline' + applywarp_func_to_standard.inputs.interp = "spline" - wf.connect(split_func, 'out_files', - applywarp_func_to_standard, 'in_file') + wf.connect(split_func, "out_files", applywarp_func_to_standard, "in_file") - wf.connect(convert_registration_warp, 'out_file', - applywarp_func_to_standard, 'field_file') + wf.connect( + convert_registration_warp, "out_file", applywarp_func_to_standard, "field_file" + ) - wf.connect(anat_brain_to_func_res, 'outputspec.space-template_res-bold_desc-brain_T1w', - applywarp_func_to_standard, 'ref_file') + wf.connect( + anat_brain_to_func_res, + "outputspec.space-template_res-bold_desc-brain_T1w", + applywarp_func_to_standard, + "ref_file", + ) # applywarp --rel --interp=nn --in=${WD}/prevols/vol${vnum}_mask.nii.gz --warp=${MotionMatrixFolder}/${MotionMatrixPrefix}${vnum}_all_warp.nii.gz --ref=${WD}/${T1wImageFile}.${FinalfMRIResolution} --out=${WD}/postvols/vol${vnum}_mask.nii.gz - applywarp_func_mask_to_standard = pe.MapNode(interface=fsl.ApplyWarp(), - name=f'applywarp_func_mask_to_standard_{pipe_num}', - iterfield=['in_file', 'field_file']) + applywarp_func_mask_to_standard = pe.MapNode( + interface=fsl.ApplyWarp(), + name=f"applywarp_func_mask_to_standard_{pipe_num}", + iterfield=["in_file", "field_file"], + ) applywarp_func_mask_to_standard.inputs.relwarp = True - applywarp_func_mask_to_standard.inputs.interp = 'nn' + applywarp_func_mask_to_standard.inputs.interp = "nn" - wf.connect(generate_vol_mask, 'out_file', - applywarp_func_mask_to_standard, 'in_file') + wf.connect( + generate_vol_mask, "out_file", applywarp_func_mask_to_standard, "in_file" + ) - wf.connect(convert_registration_warp, 'out_file', - applywarp_func_mask_to_standard, 'field_file') + wf.connect( + convert_registration_warp, + "out_file", + applywarp_func_mask_to_standard, + "field_file", + ) - wf.connect(anat_brain_to_func_res, 'outputspec.space-template_res-bold_desc-brain_T1w', - applywarp_func_mask_to_standard, 'ref_file') + wf.connect( + anat_brain_to_func_res, + "outputspec.space-template_res-bold_desc-brain_T1w", + applywarp_func_mask_to_standard, + "ref_file", + ) ### Loop ends! 
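
With the per-volume resampling done, the next hunk stacks the warped 3D volumes
back into a 4D series with fslmerge -tr, merges the warped all-ones masks the
same way, and reduces the mask stack with fslmaths -Tmin, so a voxel survives
only if it stayed inside the field of view in every frame (an intersection over
time). A minimal sketch of that recombination, continuing the hypothetical
nodes above and using nipype's stock fsl.Merge in place of C-PAC's fslMerge
wrapper:

    from nipype.interfaces import fsl
    from nipype.pipeline import engine as pe

    # fslmerge -tr: rebuild the 4D series from the warped volumes.
    merge_bold = pe.Node(fsl.Merge(dimension="t"), name="merge_bold")
    merge_bold.inputs.tr = 2.0  # assumed repetition time, in seconds

    # Same merge for the per-volume all-ones masks...
    merge_masks = pe.Node(fsl.Merge(dimension="t"), name="merge_masks")
    merge_masks.inputs.tr = 2.0

    # ...then fslmaths -Tmin: the temporal minimum is nonzero only where
    # every frame had data, i.e. the intersection of per-frame FOVs.
    min_mask = pe.Node(fsl.maths.MathsCommand(args="-Tmin"), name="min_mask")

    wf_sketch = pe.Workflow(name="merge_sketch")
    wf_sketch.connect(merge_masks, "merged_file", min_mask, "in_file")
    # merge_bold.inputs.in_files and merge_masks.inputs.in_files would be
    # fed by the applywarp MapNodes from the loop above.
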
### # fslmerge -tr ${OutputfMRI} $FrameMergeSTRING $TR_vol - merge_func_to_standard = pe.Node(interface=fslMerge(), - name=f'merge_func_to_standard_{pipe_num}') + merge_func_to_standard = pe.Node( + interface=fslMerge(), name=f"merge_func_to_standard_{pipe_num}" + ) - merge_func_to_standard.inputs.dimension = 't' + merge_func_to_standard.inputs.dimension = "t" - wf.connect(applywarp_func_to_standard, 'out_file', - merge_func_to_standard, 'in_files') + wf.connect( + applywarp_func_to_standard, "out_file", merge_func_to_standard, "in_files" + ) # fslmerge -tr ${OutputfMRI}_mask $FrameMergeSTRINGII $TR_vol - merge_func_mask_to_standard = pe.Node(interface=fslMerge(), - name='merge_func_mask_to_' - f'standard_{pipe_num}') + merge_func_mask_to_standard = pe.Node( + interface=fslMerge(), name="merge_func_mask_to_" f"standard_{pipe_num}" + ) - merge_func_mask_to_standard.inputs.dimension = 't' + merge_func_mask_to_standard.inputs.dimension = "t" - wf.connect(applywarp_func_mask_to_standard, 'out_file', - merge_func_mask_to_standard, 'in_files') + wf.connect( + applywarp_func_mask_to_standard, + "out_file", + merge_func_mask_to_standard, + "in_files", + ) # fslmaths ${OutputfMRI}_mask -Tmin ${OutputfMRI}_mask - find_min_mask = pe.Node(interface=fsl.maths.MathsCommand(), - name=f'find_min_mask_{pipe_num}') + find_min_mask = pe.Node( + interface=fsl.maths.MathsCommand(), name=f"find_min_mask_{pipe_num}" + ) - find_min_mask.inputs.args = '-Tmin' + find_min_mask.inputs.args = "-Tmin" - wf.connect(merge_func_mask_to_standard, 'merged_file', - find_min_mask, 'in_file') + wf.connect(merge_func_mask_to_standard, "merged_file", find_min_mask, "in_file") # Combine transformations: gradient non-linearity distortion + fMRI_dc to standard # convertwarp --relout --rel --ref=${WD}/${T1wImageFile}.${FinalfMRIResolution} --warp1=${GradientDistortionField} --warp2=${OutputTransform} --out=${WD}/Scout_gdc_MNI_warp.nii.gz - convert_dc_warp = pe.Node(interface=fsl.ConvertWarp(), - name=f'convert_dc_warp_{pipe_num}') + convert_dc_warp = pe.Node( + interface=fsl.ConvertWarp(), name=f"convert_dc_warp_{pipe_num}" + ) convert_dc_warp.inputs.out_relwarp = True convert_dc_warp.inputs.relwarp = True - wf.connect(anat_brain_to_func_res, 'outputspec.space-template_res-bold_desc-brain_T1w', convert_dc_warp, 'reference') + wf.connect( + anat_brain_to_func_res, + "outputspec.space-template_res-bold_desc-brain_T1w", + convert_dc_warp, + "reference", + ) - wf.connect(multiply_func_roi_by_zero, 'out_file', - convert_dc_warp, 'warp1') + wf.connect(multiply_func_roi_by_zero, "out_file", convert_dc_warp, "warp1") - wf.connect(convert_func_to_standard_warp, 'out_file', - convert_dc_warp, 'warp2') + wf.connect(convert_func_to_standard_warp, "out_file", convert_dc_warp, "warp2") # applywarp --rel --interp=spline --in=${ScoutInput} -w ${WD}/Scout_gdc_MNI_warp.nii.gz -r ${WD}/${T1wImageFile}.${FinalfMRIResolution} -o ${ScoutOutput} - applywarp_scout = pe.Node(interface=fsl.ApplyWarp(), - name=f'applywarp_scout_input_{pipe_num}') + applywarp_scout = pe.Node( + interface=fsl.ApplyWarp(), name=f"applywarp_scout_input_{pipe_num}" + ) applywarp_scout.inputs.relwarp = True - applywarp_scout.inputs.interp = 'spline' + applywarp_scout.inputs.interp = "spline" - node, out = strat_pool.get_data('motion-basefile') - wf.connect(node, out, applywarp_scout, 'in_file') + node, out = strat_pool.get_data("motion-basefile") + wf.connect(node, out, applywarp_scout, "in_file") - wf.connect(anat_brain_to_func_res, 
'outputspec.space-template_res-bold_desc-brain_T1w', applywarp_scout, 'ref_file') + wf.connect( + anat_brain_to_func_res, + "outputspec.space-template_res-bold_desc-brain_T1w", + applywarp_scout, + "ref_file", + ) - wf.connect(convert_dc_warp, 'out_file', applywarp_scout, 'field_file') + wf.connect(convert_dc_warp, "out_file", applywarp_scout, "field_file") # https://github.com/DCAN-Labs/DCAN-HCP/blob/master/fMRIVolume/scripts/IntensityNormalization.sh#L124-L127 # fslmaths ${InputfMRI} -mas ${BrainMask} -mas ${InputfMRI}_mask -thr 0 -ing 10000 ${OutputfMRI} -odt float - merge_func_mask = pe.Node(util.Merge(2), - name=f'merge_func_mask_{pipe_num}') + merge_func_mask = pe.Node(util.Merge(2), name=f"merge_func_mask_{pipe_num}") + + anat_brain_mask_to_func_res = anat_brain_mask_to_bold_res( + wf_name="anat_brain_mask_to_bold_res", cfg=cfg, pipe_num=pipe_num + ) - anat_brain_mask_to_func_res = anat_brain_mask_to_bold_res(wf_name='anat_brain_mask_to_bold_res', cfg=cfg, pipe_num=pipe_num) - - node, out = strat_pool.get_data('space-template_desc-brain_mask') - wf.connect(node, out, anat_brain_mask_to_func_res, 'inputspec.space-template_desc-T1w_mask') + node, out = strat_pool.get_data("space-template_desc-brain_mask") + wf.connect( + node, out, anat_brain_mask_to_func_res, "inputspec.space-template_desc-T1w_mask" + ) - wf.connect(anat_brain_to_func_res, 'outputspec.space-template_res-bold_desc-brain_T1w', - anat_brain_mask_to_func_res, 'inputspec.space-template_desc-preproc_T1w') + wf.connect( + anat_brain_to_func_res, + "outputspec.space-template_res-bold_desc-brain_T1w", + anat_brain_mask_to_func_res, + "inputspec.space-template_desc-preproc_T1w", + ) - wf.connect(anat_brain_mask_to_func_res, 'outputspec.space-template_desc-bold_mask', merge_func_mask, 'in1') + wf.connect( + anat_brain_mask_to_func_res, + "outputspec.space-template_desc-bold_mask", + merge_func_mask, + "in1", + ) - wf.connect(find_min_mask, 'out_file', merge_func_mask, 'in2') + wf.connect(find_min_mask, "out_file", merge_func_mask, "in2") - extract_func_brain = pe.Node(interface=fsl.MultiImageMaths(), - name=f'extract_func_brain_{pipe_num}') + extract_func_brain = pe.Node( + interface=fsl.MultiImageMaths(), name=f"extract_func_brain_{pipe_num}" + ) - extract_func_brain.inputs.op_string = '-mas %s -mas %s -thr 0 -ing 10000' - extract_func_brain.inputs.output_datatype = 'float' + extract_func_brain.inputs.op_string = "-mas %s -mas %s -thr 0 -ing 10000" + extract_func_brain.inputs.output_datatype = "float" - wf.connect(merge_func_to_standard, 'merged_file', - extract_func_brain, 'in_file') + wf.connect(merge_func_to_standard, "merged_file", extract_func_brain, "in_file") - wf.connect(merge_func_mask, 'out', - extract_func_brain, 'operand_files') + wf.connect(merge_func_mask, "out", extract_func_brain, "operand_files") # fslmaths ${ScoutInput} -mas ${BrainMask} -mas ${InputfMRI}_mask -thr 0 -ing 10000 ${ScoutOutput} -odt float - extract_scout_brain = pe.Node(interface=fsl.MultiImageMaths(), - name=f'extract_scout_brain_{pipe_num}') + extract_scout_brain = pe.Node( + interface=fsl.MultiImageMaths(), name=f"extract_scout_brain_{pipe_num}" + ) - extract_scout_brain.inputs.op_string = '-mas %s -mas %s -thr 0 -ing 10000' - extract_scout_brain.inputs.output_datatype = 'float' + extract_scout_brain.inputs.op_string = "-mas %s -mas %s -thr 0 -ing 10000" + extract_scout_brain.inputs.output_datatype = "float" - wf.connect(applywarp_scout, 'out_file', - extract_scout_brain, 'in_file') + wf.connect(applywarp_scout, "out_file", 
extract_scout_brain, "in_file") - wf.connect(merge_func_mask, 'out', - extract_scout_brain, 'operand_files') + wf.connect(merge_func_mask, "out", extract_scout_brain, "operand_files") outputs = { - 'space-template_desc-preproc_bold': (extract_func_brain, 'out_file'), - 'space-template_desc-scout_bold': (extract_scout_brain, 'out_file'), - 'space-template_desc-head_bold': (merge_func_to_standard, 'merged_file') + "space-template_desc-preproc_bold": (extract_func_brain, "out_file"), + "space-template_desc-scout_bold": (extract_scout_brain, "out_file"), + "space-template_desc-head_bold": (merge_func_to_standard, "merged_file"), } return (wf, outputs) @@ -4045,265 +4415,302 @@ def warp_timeseries_to_T1template_abcd(wf, cfg, strat_pool, pipe_num, opt=None "space-template_desc-bold_mask": {"Template": "T1w-template"}, }, ) -def warp_timeseries_to_T1template_dcan_nhp(wf, cfg, strat_pool, pipe_num, - opt=None): +def warp_timeseries_to_T1template_dcan_nhp(wf, cfg, strat_pool, pipe_num, opt=None): # Apply motion correction, coreg, anat-to-template transforms on raw functional timeseries # Ref: https://github.com/DCAN-Labs/dcan-macaque-pipeline/blob/master/fMRIVolume/scripts/OneStepResampling.sh # https://github.com/DCAN-Labs/dcan-macaque-pipeline/blob/master/fMRIVolume/scripts/OneStepResampling.sh#L131 # ${FSLDIR}/bin/flirt -interp spline -in ${T1wImage} -ref ${T1wImage} -applyisoxfm $FinalfMRIResolution -out ${WD}/${T1wImageFile}.${FinalfMRIResolution} - anat_resample = pe.Node(interface=fsl.FLIRT(), - name=f'anat_resample_func_res_{pipe_num}' - ) - anat_resample.inputs.apply_isoxfm = float(cfg.registration_workflows['functional_registration']['func_registration_to_template']['output_resolution']['func_preproc_outputs'].replace("mm", "")) - anat_resample.inputs.interp = 'spline' + anat_resample = pe.Node( + interface=fsl.FLIRT(), name=f"anat_resample_func_res_{pipe_num}" + ) + anat_resample.inputs.apply_isoxfm = float( + cfg.registration_workflows["functional_registration"][ + "func_registration_to_template" + ]["output_resolution"]["func_preproc_outputs"].replace("mm", "") + ) + anat_resample.inputs.interp = "spline" - node, out = strat_pool.get_data('space-template_desc-head_T1w') - wf.connect(node, out, anat_resample, 'in_file') - wf.connect(node, out, anat_resample, 'reference') + node, out = strat_pool.get_data("space-template_desc-head_T1w") + wf.connect(node, out, anat_resample, "in_file") + wf.connect(node, out, anat_resample, "reference") # ${FSLDIR}/bin/applywarp --rel --interp=spline -i ${T1wImage} -r ${ResampRefIm} --premat=$FSLDIR/etc/flirtsch/ident.mat -o ${WD}/${T1wImageFile}.${FinalfMRIResolution} - applywarp_anat_res = pe.Node(interface=fsl.ApplyWarp(), - name=f'anat_func_res_{pipe_num}') + applywarp_anat_res = pe.Node( + interface=fsl.ApplyWarp(), name=f"anat_func_res_{pipe_num}" + ) applywarp_anat_res.inputs.relwarp = True - applywarp_anat_res.inputs.interp = 'spline' - applywarp_anat_res.inputs.premat = cfg.registration_workflows['anatomical_registration']['registration']['FSL-FNIRT']['identity_matrix'] + applywarp_anat_res.inputs.interp = "spline" + applywarp_anat_res.inputs.premat = cfg.registration_workflows[ + "anatomical_registration" + ]["registration"]["FSL-FNIRT"]["identity_matrix"] - node, out = strat_pool.get_data('space-template_desc-head_T1w') - wf.connect(node, out, applywarp_anat_res, 'in_file') - wf.connect(anat_resample, 'out_file', applywarp_anat_res, 'ref_file') + node, out = strat_pool.get_data("space-template_desc-head_T1w") + wf.connect(node, out, 
applywarp_anat_res, "in_file") + wf.connect(anat_resample, "out_file", applywarp_anat_res, "ref_file") # https://github.com/DCAN-Labs/dcan-macaque-pipeline/blob/master/fMRIVolume/scripts/OneStepResampling.sh#L136-L138 # Create brain masks in this space (changing resolution) # ${FSLDIR}/bin/applywarp --rel --interp=nn -i ${FreeSurferBrainMask}.nii.gz -r ${WD}/${T1wImageFile}.${FinalfMRIResolution} --premat=$FSLDIR/etc/flirtsch/ident.mat -o ${WD}/${FreeSurferBrainMaskFile}.${FinalfMRIResolution}.nii.gz - applywarp_anat_mask_res = pe.Node(interface=fsl.ApplyWarp(), - name=f'anat_mask_func_res_{pipe_num}') + applywarp_anat_mask_res = pe.Node( + interface=fsl.ApplyWarp(), name=f"anat_mask_func_res_{pipe_num}" + ) applywarp_anat_mask_res.inputs.relwarp = True - applywarp_anat_mask_res.inputs.interp = 'nn' - applywarp_anat_mask_res.inputs.premat = cfg.registration_workflows['anatomical_registration']['registration']['FSL-FNIRT']['identity_matrix'] + applywarp_anat_mask_res.inputs.interp = "nn" + applywarp_anat_mask_res.inputs.premat = cfg.registration_workflows[ + "anatomical_registration" + ]["registration"]["FSL-FNIRT"]["identity_matrix"] - node, out = strat_pool.get_data('space-template_desc-T1w_mask') - wf.connect(node, out, applywarp_anat_mask_res, 'in_file') - wf.connect(applywarp_anat_res, 'out_file', applywarp_anat_mask_res, 'ref_file') + node, out = strat_pool.get_data("space-template_desc-T1w_mask") + wf.connect(node, out, applywarp_anat_mask_res, "in_file") + wf.connect(applywarp_anat_res, "out_file", applywarp_anat_mask_res, "ref_file") # ${FSLDIR}/bin/fslmaths ${WD}/${T1wImageFile}.${FinalfMRIResolution} -mas ${WD}/${FreeSurferBrainMaskFile}.${FinalfMRIResolution}.nii.gz ${WD}/${FreeSurferBrainMaskFile}.${FinalfMRIResolution}.nii.gz - T1_brain_res = pe.Node(interface=fsl.MultiImageMaths(), - name=f't1_brain_func_res_{pipe_num}') + T1_brain_res = pe.Node( + interface=fsl.MultiImageMaths(), name=f"t1_brain_func_res_{pipe_num}" + ) T1_brain_res.inputs.op_string = "-mas %s " - wf.connect(applywarp_anat_res, 'out_file', T1_brain_res, 'in_file') - wf.connect(applywarp_anat_mask_res, 'out_file', T1_brain_res, 'operand_files') + wf.connect(applywarp_anat_res, "out_file", T1_brain_res, "in_file") + wf.connect(applywarp_anat_mask_res, "out_file", T1_brain_res, "operand_files") # Create versions of the biasfield (changing resolution) # ${FSLDIR}/bin/applywarp --rel --interp=spline -i ${BiasField} -r ${WD}/${FreeSurferBrainMaskFile}.${FinalfMRIResolution}.nii.gz --premat=$FSLDIR/etc/flirtsch/ident.mat -o ${WD}/${BiasFieldFile}.${FinalfMRIResolution} - applywarp_bias_field_res = pe.Node(interface=fsl.ApplyWarp(), - name=f'biasfiled_func_res_{pipe_num}') + applywarp_bias_field_res = pe.Node( + interface=fsl.ApplyWarp(), name=f"biasfiled_func_res_{pipe_num}" + ) applywarp_bias_field_res.inputs.relwarp = True - applywarp_bias_field_res.inputs.interp = 'spline' - applywarp_bias_field_res.inputs.premat = cfg.registration_workflows['anatomical_registration']['registration']['FSL-FNIRT']['identity_matrix'] + applywarp_bias_field_res.inputs.interp = "spline" + applywarp_bias_field_res.inputs.premat = cfg.registration_workflows[ + "anatomical_registration" + ]["registration"]["FSL-FNIRT"]["identity_matrix"] - node, out = strat_pool.get_data('space-template_desc-T1wT2w_biasfield') - wf.connect(node, out, applywarp_bias_field_res, 'in_file') - wf.connect(T1_brain_res, 'out_file', applywarp_bias_field_res, 'ref_file') + node, out = strat_pool.get_data("space-template_desc-T1wT2w_biasfield") + 
wf.connect(node, out, applywarp_bias_field_res, "in_file") + wf.connect(T1_brain_res, "out_file", applywarp_bias_field_res, "ref_file") # ${FSLDIR}/bin/fslmaths ${WD}/${BiasFieldFile}.${FinalfMRIResolution} -thr 0.1 ${WD}/${BiasFieldFile}.${FinalfMRIResolution} - biasfield_thr = pe.Node(interface=fsl.MultiImageMaths(), - name=f'biasfiedl_thr_{pipe_num}') + biasfield_thr = pe.Node( + interface=fsl.MultiImageMaths(), name=f"biasfiedl_thr_{pipe_num}" + ) biasfield_thr.inputs.op_string = "-thr 0.1" - wf.connect(applywarp_bias_field_res, 'out_file', biasfield_thr, 'in_file') + wf.connect(applywarp_bias_field_res, "out_file", biasfield_thr, "in_file") # https://github.com/DCAN-Labs/dcan-macaque-pipeline/blob/master/fMRIVolume/scripts/OneStepResampling.sh#L144-L146 # convertwarp --relout --rel --warp1=${fMRIToStructuralInput} --warp2=${StructuralToStandard} --ref=${WD}/${T1wImageFile}.${FinalfMRIResolution} --out=${OutputTransform} - convert_func_to_standard_warp = pe.Node(interface=fsl.ConvertWarp(), - name=f'convert_func_to_standard_warp_{pipe_num}') + convert_func_to_standard_warp = pe.Node( + interface=fsl.ConvertWarp(), name=f"convert_func_to_standard_warp_{pipe_num}" + ) convert_func_to_standard_warp.inputs.out_relwarp = True convert_func_to_standard_warp.inputs.relwarp = True - node, out = strat_pool.get_data('from-bold_to-T1w_mode-image_desc-linear_warp') - wf.connect(node, out, convert_func_to_standard_warp, 'warp1') + node, out = strat_pool.get_data("from-bold_to-T1w_mode-image_desc-linear_warp") + wf.connect(node, out, convert_func_to_standard_warp, "warp1") - node, out = strat_pool.get_data('from-T1w_to-template_mode-image_warp') - wf.connect(node, out, convert_func_to_standard_warp, 'warp2') + node, out = strat_pool.get_data("from-T1w_to-template_mode-image_warp") + wf.connect(node, out, convert_func_to_standard_warp, "warp2") - wf.connect(applywarp_anat_res, 'out_file', convert_func_to_standard_warp, 'reference') + wf.connect( + applywarp_anat_res, "out_file", convert_func_to_standard_warp, "reference" + ) # https://github.com/DCAN-Labs/dcan-macaque-pipeline/blob/master/fMRIVolume/GenericfMRIVolumeProcessingPipeline.sh#L157-L158 # fslroi "$fMRIFolder"/"$NameOffMRI"_gdc "$fMRIFolder"/"$NameOffMRI"_gdc_warp 0 3 - extract_func_roi = pe.Node(interface=fsl.ExtractROI(), - name=f'extract_func_roi_{pipe_num}') + extract_func_roi = pe.Node( + interface=fsl.ExtractROI(), name=f"extract_func_roi_{pipe_num}" + ) extract_func_roi.inputs.t_min = 0 extract_func_roi.inputs.t_size = 3 - node, out = strat_pool.get_data(['desc-reorient_bold', 'bold']) - wf.connect(node, out, extract_func_roi, 'in_file') + node, out = strat_pool.get_data(["desc-reorient_bold", "bold"]) + wf.connect(node, out, extract_func_roi, "in_file") # fslmaths "$fMRIFolder"/"$NameOffMRI"_gdc_warp -mul 0 "$fMRIFolder"/"$NameOffMRI"_gdc_warp - multiply_func_roi_by_zero = pe.Node(interface=fsl.maths.MathsCommand(), - name=f'multiply_func_roi_by_zero_{pipe_num}') + multiply_func_roi_by_zero = pe.Node( + interface=fsl.maths.MathsCommand(), name=f"multiply_func_roi_by_zero_{pipe_num}" + ) - multiply_func_roi_by_zero.inputs.args = '-mul 0' + multiply_func_roi_by_zero.inputs.args = "-mul 0" - wf.connect(extract_func_roi, 'roi_file', - multiply_func_roi_by_zero, 'in_file') + wf.connect(extract_func_roi, "roi_file", multiply_func_roi_by_zero, "in_file") # https://github.com/DCAN-Labs/dcan-macaque-pipeline/blob/master/fMRIVolume/scripts/OneStepResampling.sh#L173 # fslsplit ${InputfMRI} ${WD}/prevols/vol -t - split_func = 
     # https://github.com/DCAN-Labs/dcan-macaque-pipeline/blob/master/fMRIVolume/scripts/OneStepResampling.sh#L173
     # fslsplit ${InputfMRI} ${WD}/prevols/vol -t
-    split_func = pe.Node(interface=fsl.Split(),
-                         name=f'split_func_{pipe_num}')
+    split_func = pe.Node(interface=fsl.Split(), name=f"split_func_{pipe_num}")

-    split_func.inputs.dimension = 't'
+    split_func.inputs.dimension = "t"

-    node, out = strat_pool.get_data(['desc-reorient_bold', 'bold'])
-    wf.connect(node, out, split_func, 'in_file')
+    node, out = strat_pool.get_data(["desc-reorient_bold", "bold"])
+    wf.connect(node, out, split_func, "in_file")

     ### Loop starts! ###
     # convertwarp --relout --rel --ref=${WD}/prevols/vol${vnum}.nii.gz --warp1=${GradientDistortionField} --postmat=${MotionMatrixFolder}/${MotionMatrixPrefix}${vnum} --out=${MotionMatrixFolder}/${MotionMatrixPrefix}${vnum}_gdc_warp.nii.gz
-    convert_motion_distortion_warp = pe.MapNode(interface=fsl.ConvertWarp(),
-                                                name=f'convert_motion_distortion_warp_{pipe_num}',
-                                                iterfield=['reference', 'postmat'])
+    convert_motion_distortion_warp = pe.MapNode(
+        interface=fsl.ConvertWarp(),
+        name=f"convert_motion_distortion_warp_{pipe_num}",
+        iterfield=["reference", "postmat"],
+    )
     convert_motion_distortion_warp.inputs.out_relwarp = True
     convert_motion_distortion_warp.inputs.relwarp = True

-    wf.connect(multiply_func_roi_by_zero, 'out_file',
-               convert_motion_distortion_warp, 'warp1')
+    wf.connect(
+        multiply_func_roi_by_zero, "out_file", convert_motion_distortion_warp, "warp1"
+    )

-    wf.connect(split_func, 'out_files',
-               convert_motion_distortion_warp, 'reference')
+    wf.connect(split_func, "out_files", convert_motion_distortion_warp, "reference")

-    node, out = strat_pool.get_data('coordinate-transformation')
-    wf.connect(node, out, convert_motion_distortion_warp, 'postmat')
+    node, out = strat_pool.get_data("coordinate-transformation")
+    wf.connect(node, out, convert_motion_distortion_warp, "postmat")

     # convertwarp --relout --rel --ref=${WD}/${T1wImageFile}.${FinalfMRIResolution} --warp1=${MotionMatrixFolder}/${MotionMatrixPrefix}${vnum}_gdc_warp.nii.gz --warp2=${OutputTransform} --out=${MotionMatrixFolder}/${MotionMatrixPrefix}${vnum}_all_warp.nii.gz
-    convert_registration_warp = pe.MapNode(interface=fsl.ConvertWarp(),
-                                           name=f'convert_registration_warp_{pipe_num}',
-                                           iterfield=['warp1'])
+    convert_registration_warp = pe.MapNode(
+        interface=fsl.ConvertWarp(),
+        name=f"convert_registration_warp_{pipe_num}",
+        iterfield=["warp1"],
+    )
     convert_registration_warp.inputs.out_relwarp = True
     convert_registration_warp.inputs.relwarp = True

-    wf.connect(applywarp_anat_res, 'out_file', convert_registration_warp, 'reference')
+    wf.connect(applywarp_anat_res, "out_file", convert_registration_warp, "reference")

-    wf.connect(convert_motion_distortion_warp, 'out_file',
-               convert_registration_warp, 'warp1')
+    wf.connect(
+        convert_motion_distortion_warp, "out_file", convert_registration_warp, "warp1"
+    )

-    wf.connect(convert_func_to_standard_warp, 'out_file',
-               convert_registration_warp, 'warp2')
+    wf.connect(
+        convert_func_to_standard_warp, "out_file", convert_registration_warp, "warp2"
+    )

     # fslmaths ${WD}/prevols/vol${vnum}.nii.gz -mul 0 -add 1 ${WD}/prevols/vol${vnum}_mask.nii.gz
-    generate_vol_mask = pe.MapNode(interface=fsl.maths.MathsCommand(),
-                                   name=f'generate_mask_{pipe_num}',
-                                   iterfield=['in_file'])
+    generate_vol_mask = pe.MapNode(
+        interface=fsl.maths.MathsCommand(),
+        name=f"generate_mask_{pipe_num}",
+        iterfield=["in_file"],
+    )

-    generate_vol_mask.inputs.args = '-mul 0 -add 1'
+    generate_vol_mask.inputs.args = "-mul 0 -add 1"

-    wf.connect(split_func, 'out_files',
-               generate_vol_mask, 'in_file')
+    wf.connect(split_func, "out_files", generate_vol_mask, "in_file")
"in_file") # applywarp --rel --interp=spline --in=${WD}/prevols/vol${vnum}.nii.gz --warp=${MotionMatrixFolder}/${MotionMatrixPrefix}${vnum}_all_warp.nii.gz --ref=${WD}/${T1wImageFile}.${FinalfMRIResolution} --out=${WD}/postvols/vol${vnum}.nii.gz - applywarp_func_to_standard = pe.MapNode(interface=fsl.ApplyWarp(), - name=f'applywarp_func_to_standard_{pipe_num}', - iterfield=['in_file', 'field_file']) + applywarp_func_to_standard = pe.MapNode( + interface=fsl.ApplyWarp(), + name=f"applywarp_func_to_standard_{pipe_num}", + iterfield=["in_file", "field_file"], + ) applywarp_func_to_standard.inputs.relwarp = True - applywarp_func_to_standard.inputs.interp = 'spline' + applywarp_func_to_standard.inputs.interp = "spline" - wf.connect(split_func, 'out_files', - applywarp_func_to_standard, 'in_file') + wf.connect(split_func, "out_files", applywarp_func_to_standard, "in_file") - wf.connect(convert_registration_warp, 'out_file', - applywarp_func_to_standard, 'field_file') + wf.connect( + convert_registration_warp, "out_file", applywarp_func_to_standard, "field_file" + ) - wf.connect(applywarp_anat_res, 'out_file', - applywarp_func_to_standard, 'ref_file') + wf.connect(applywarp_anat_res, "out_file", applywarp_func_to_standard, "ref_file") # applywarp --rel --interp=nn --in=${WD}/prevols/vol${vnum}_mask.nii.gz --warp=${MotionMatrixFolder}/${MotionMatrixPrefix}${vnum}_all_warp.nii.gz --ref=${WD}/${T1wImageFile}.${FinalfMRIResolution} --out=${WD}/postvols/vol${vnum}_mask.nii.gz - applywarp_func_mask_to_standard = pe.MapNode(interface=fsl.ApplyWarp(), - name=f'applywarp_func_mask_to_standard_{pipe_num}', - iterfield=['in_file', 'field_file']) + applywarp_func_mask_to_standard = pe.MapNode( + interface=fsl.ApplyWarp(), + name=f"applywarp_func_mask_to_standard_{pipe_num}", + iterfield=["in_file", "field_file"], + ) applywarp_func_mask_to_standard.inputs.relwarp = True - applywarp_func_mask_to_standard.inputs.interp = 'nn' + applywarp_func_mask_to_standard.inputs.interp = "nn" - wf.connect(generate_vol_mask, 'out_file', - applywarp_func_mask_to_standard, 'in_file') + wf.connect( + generate_vol_mask, "out_file", applywarp_func_mask_to_standard, "in_file" + ) - wf.connect(convert_registration_warp, 'out_file', - applywarp_func_mask_to_standard, 'field_file') + wf.connect( + convert_registration_warp, + "out_file", + applywarp_func_mask_to_standard, + "field_file", + ) - wf.connect(applywarp_anat_res, 'out_file', - applywarp_func_mask_to_standard, 'ref_file') + wf.connect( + applywarp_anat_res, "out_file", applywarp_func_mask_to_standard, "ref_file" + ) ### Loop ends! 
     # fslmerge -tr ${OutputfMRI} $FrameMergeSTRING $TR_vol
-    merge_func_to_standard = pe.Node(interface=fslMerge(),
-                                     name=f'merge_func_to_standard_{pipe_num}')
+    merge_func_to_standard = pe.Node(
+        interface=fslMerge(), name=f"merge_func_to_standard_{pipe_num}"
+    )

-    merge_func_to_standard.inputs.dimension = 't'
+    merge_func_to_standard.inputs.dimension = "t"

-    wf.connect(applywarp_func_to_standard, 'out_file',
-               merge_func_to_standard, 'in_files')
+    wf.connect(
+        applywarp_func_to_standard, "out_file", merge_func_to_standard, "in_files"
+    )

     # fslmerge -tr ${OutputfMRI}_mask $FrameMergeSTRINGII $TR_vol
-    merge_func_mask_to_standard = pe.Node(interface=fslMerge(),
-                                          name='merge_func_mask_to_'
-                                               f'standard_{pipe_num}')
+    merge_func_mask_to_standard = pe.Node(
+        interface=fslMerge(), name=f"merge_func_mask_to_standard_{pipe_num}"
+    )

-    merge_func_mask_to_standard.inputs.dimension = 't'
+    merge_func_mask_to_standard.inputs.dimension = "t"

-    wf.connect(applywarp_func_mask_to_standard, 'out_file',
-               merge_func_mask_to_standard, 'in_files')
+    wf.connect(
+        applywarp_func_mask_to_standard,
+        "out_file",
+        merge_func_mask_to_standard,
+        "in_files",
+    )

     # fslmaths ${OutputfMRI}_mask -Tmin ${OutputfMRI}_mask
-    find_min_mask = pe.Node(interface=fsl.maths.MathsCommand(),
-                            name=f'find_min_mask_{pipe_num}')
+    find_min_mask = pe.Node(
+        interface=fsl.maths.MathsCommand(), name=f"find_min_mask_{pipe_num}"
+    )

-    find_min_mask.inputs.args = '-Tmin'
+    find_min_mask.inputs.args = "-Tmin"

-    wf.connect(merge_func_mask_to_standard, 'merged_file',
-               find_min_mask, 'in_file')
+    wf.connect(merge_func_mask_to_standard, "merged_file", find_min_mask, "in_file")

     # https://github.com/DCAN-Labs/dcan-macaque-pipeline/blob/master/fMRIVolume/scripts/IntensityNormalization.sh#L113-L119
     # fslmaths ${InputfMRI} -div ${BiasField} $jacobiancom -mas ${BrainMask} -mas ${InputfMRI}_mask -ing 10000 ${OutputfMRI} -odt float
-    merge_func_mask = pe.Node(util.Merge(3),
-                              name=f'merge_operand_files_{pipe_num}')
-
-    wf.connect(biasfield_thr, 'out_file', merge_func_mask, 'in1')
+    merge_func_mask = pe.Node(util.Merge(3), name=f"merge_operand_files_{pipe_num}")

-    wf.connect(applywarp_anat_mask_res, 'out_file', merge_func_mask, 'in2')
+    wf.connect(biasfield_thr, "out_file", merge_func_mask, "in1")

-    wf.connect(find_min_mask, 'out_file', merge_func_mask, 'in3')
+    wf.connect(applywarp_anat_mask_res, "out_file", merge_func_mask, "in2")
+    wf.connect(find_min_mask, "out_file", merge_func_mask, "in3")

-    extract_func_brain = pe.Node(interface=fsl.MultiImageMaths(),
-                                 name=f'extract_func_brain_{pipe_num}')
+    extract_func_brain = pe.Node(
+        interface=fsl.MultiImageMaths(), name=f"extract_func_brain_{pipe_num}"
+    )

-    extract_func_brain.inputs.op_string = '-div %s -mas %s -mas %s -ing 10000'
-    extract_func_brain.inputs.output_datatype = 'float'
+    extract_func_brain.inputs.op_string = "-div %s -mas %s -mas %s -ing 10000"
+    extract_func_brain.inputs.output_datatype = "float"

-    wf.connect(merge_func_to_standard, 'merged_file',
-               extract_func_brain, 'in_file')
+    wf.connect(merge_func_to_standard, "merged_file", extract_func_brain, "in_file")

-    wf.connect(merge_func_mask, 'out',
-               extract_func_brain, 'operand_files')
+    wf.connect(merge_func_mask, "out", extract_func_brain, "operand_files")

-    func_mask_final = pe.Node(interface=fsl.MultiImageMaths(),
-                              name=f'func_mask_final_{pipe_num}')
+    func_mask_final = pe.Node(
+        interface=fsl.MultiImageMaths(), name=f"func_mask_final_{pipe_num}"
+    )
     func_mask_final.inputs.op_string = "-mas %s "

-    
wf.connect(applywarp_anat_mask_res, 'out_file', func_mask_final, 'in_file') + wf.connect(applywarp_anat_mask_res, "out_file", func_mask_final, "in_file") - wf.connect(find_min_mask, 'out_file', func_mask_final, 'operand_files') + wf.connect(find_min_mask, "out_file", func_mask_final, "operand_files") outputs = { - 'space-template_desc-preproc_bold': (extract_func_brain, 'out_file'), - 'space-template_desc-bold_mask': (func_mask_final, 'out_file') + "space-template_desc-preproc_bold": (extract_func_brain, "out_file"), + "space-template_desc-bold_mask": (func_mask_final, "out_file"), } return (wf, outputs) @@ -4338,14 +4745,10 @@ def warp_timeseries_to_T1template_dcan_nhp(wf, cfg, strat_pool, pipe_num, ) ], outputs={ - "space-template_desc-preproc_bold": { - "Template": "T1w-brain-template-funcreg"}, - "space-template_desc-brain_bold": { - "Template": "T1w-brain-template-funcreg"}, - "space-template_desc-bold_mask": { - "Template": "T1w-brain-template-funcreg"}, - "space-template_desc-head_bold": { - "Template": "T1w-brain-template-funcreg"}, + "space-template_desc-preproc_bold": {"Template": "T1w-brain-template-funcreg"}, + "space-template_desc-brain_bold": {"Template": "T1w-brain-template-funcreg"}, + "space-template_desc-bold_mask": {"Template": "T1w-brain-template-funcreg"}, + "space-template_desc-head_bold": {"Template": "T1w-brain-template-funcreg"}, "space-template_res-derivative_desc-preproc_bold": { "Template": "T1w-brain-template-deriv" }, @@ -4354,9 +4757,10 @@ def warp_timeseries_to_T1template_dcan_nhp(wf, cfg, strat_pool, pipe_num, }, }, ) -def single_step_resample_timeseries_to_T1template(wf, cfg, strat_pool, - pipe_num, opt=None): - ''' +def single_step_resample_timeseries_to_T1template( + wf, cfg, strat_pool, pipe_num, opt=None +): + """ Apply motion correction, coreg, anat-to-template transforms on slice-time corrected functional timeseries based on fMRIPrep pipeline @@ -4393,213 +4797,238 @@ def single_step_resample_timeseries_to_T1template(wf, cfg, strat_pool, OF THE POSSIBILITY OF SUCH DAMAGE. 
    Ref: https://github.com/nipreps/fmriprep/blob/84a6005b/fmriprep/workflows/bold/resampling.py#L159-L419
-    ''' # noqa: 501
-    xfm_prov = strat_pool.get_cpac_provenance(
-        'from-T1w_to-template_mode-image_xfm')
+    """  # noqa: E501
+    xfm_prov = strat_pool.get_cpac_provenance("from-T1w_to-template_mode-image_xfm")
     reg_tool = check_prov_for_regtool(xfm_prov)

-    bbr2itk = pe.Node(util.Function(input_names=['reference_file',
-                                                 'source_file',
-                                                 'transform_file'],
-                                    output_names=['itk_transform'],
-                                    function=run_c3d),
-                      name=f'convert_bbr2itk_{pipe_num}')
-
-    if cfg.registration_workflows['functional_registration'][
-            'coregistration']['boundary_based_registration'][
-            'reference'] == 'whole-head':
-        node, out = strat_pool.get_data('T1w')
-        wf.connect(node, out, bbr2itk, 'reference_file')
+    bbr2itk = pe.Node(
+        util.Function(
+            input_names=["reference_file", "source_file", "transform_file"],
+            output_names=["itk_transform"],
+            function=run_c3d,
+        ),
+        name=f"convert_bbr2itk_{pipe_num}",
+    )

-    elif cfg.registration_workflows['functional_registration'][
-            'coregistration']['boundary_based_registration'][
-            'reference'] == 'brain':
-        node, out = strat_pool.get_data('desc-preproc_T1w')
-        wf.connect(node, out, bbr2itk, 'reference_file')
+    if (
+        cfg.registration_workflows["functional_registration"]["coregistration"][
+            "boundary_based_registration"
+        ]["reference"]
+        == "whole-head"
+    ):
+        node, out = strat_pool.get_data("T1w")
+        wf.connect(node, out, bbr2itk, "reference_file")
+
+    elif (
+        cfg.registration_workflows["functional_registration"]["coregistration"][
+            "boundary_based_registration"
+        ]["reference"]
+        == "brain"
+    ):
+        node, out = strat_pool.get_data("desc-preproc_T1w")
+        wf.connect(node, out, bbr2itk, "reference_file")

-    node, out = strat_pool.get_data('sbref')
-    wf.connect(node, out, bbr2itk, 'source_file')
+    node, out = strat_pool.get_data("sbref")
+    wf.connect(node, out, bbr2itk, "source_file")

-    node, out = strat_pool.get_data(
-        'from-bold_to-T1w_mode-image_desc-linear_xfm')
-    wf.connect(node, out, bbr2itk, 'transform_file')
+    node, out = strat_pool.get_data("from-bold_to-T1w_mode-image_desc-linear_xfm")
+    wf.connect(node, out, bbr2itk, "transform_file")

-    split_func = pe.Node(interface=fsl.Split(),
-                         name=f'split_func_{pipe_num}')
+    split_func = pe.Node(interface=fsl.Split(), name=f"split_func_{pipe_num}")

-    split_func.inputs.dimension = 't'
+    split_func.inputs.dimension = "t"

-    node, out = strat_pool.get_data('desc-stc_bold')
-    wf.connect(node, out, split_func, 'in_file')
+    node, out = strat_pool.get_data("desc-stc_bold")
+    wf.connect(node, out, split_func, "in_file")

     ### Loop starts! ###
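+    # Note (editorial comment): ANTs ApplyTransforms expects ITK-format
+    # transforms, while BBR coregistration and FSL motion correction produce
+    # FSL-style affines, so bbr2itk above (and motionxfm2itk below) convert
+    # those .mat files to ITK format before they are stacked into a single
+    # transform list.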
-    motionxfm2itk = pe.MapNode(util.Function(
-        input_names=['reference_file',
-                     'source_file',
-                     'transform_file'],
-        output_names=['itk_transform'],
-        function=run_c3d),
-        name=f'convert_motionxfm2itk_{pipe_num}',
-        iterfield=['transform_file'])
-
-    node, out = strat_pool.get_data('motion-basefile')
-    wf.connect(node, out, motionxfm2itk, 'reference_file')
-    wf.connect(node, out, motionxfm2itk, 'source_file')
-
-    node, out = strat_pool.get_data('coordinate-transformation')
+    motionxfm2itk = pe.MapNode(
+        util.Function(
+            input_names=["reference_file", "source_file", "transform_file"],
+            output_names=["itk_transform"],
+            function=run_c3d,
+        ),
+        name=f"convert_motionxfm2itk_{pipe_num}",
+        iterfield=["transform_file"],
+    )
+
+    node, out = strat_pool.get_data("motion-basefile")
+    wf.connect(node, out, motionxfm2itk, "reference_file")
+    wf.connect(node, out, motionxfm2itk, "source_file")
+
+    node, out = strat_pool.get_data("coordinate-transformation")
     motion_correct_tool = check_prov_for_motion_tool(
-        strat_pool.get_cpac_provenance('coordinate-transformation'))
-    if motion_correct_tool == 'mcflirt':
-        wf.connect(node, out, motionxfm2itk, 'transform_file')
-    elif motion_correct_tool == '3dvolreg':
-        convert_transform = pe.Node(util.Function(
-            input_names=['one_d_filename'],
-            output_names=['transform_directory'],
-            function=one_d_to_mat,
-            imports=['import os', 'import numpy as np']),
-            name=f'convert_transform_{pipe_num}')
-        wf.connect(node, out, convert_transform, 'one_d_filename')
-        wf.connect(convert_transform, 'transform_directory',
-                   motionxfm2itk, 'transform_file')
+        strat_pool.get_cpac_provenance("coordinate-transformation")
+    )
+    if motion_correct_tool == "mcflirt":
+        wf.connect(node, out, motionxfm2itk, "transform_file")
+    elif motion_correct_tool == "3dvolreg":
+        convert_transform = pe.Node(
+            util.Function(
+                input_names=["one_d_filename"],
+                output_names=["transform_directory"],
+                function=one_d_to_mat,
+                imports=["import os", "import numpy as np"],
+            ),
+            name=f"convert_transform_{pipe_num}",
+        )
+        wf.connect(node, out, convert_transform, "one_d_filename")
+        wf.connect(
+            convert_transform, "transform_directory", motionxfm2itk, "transform_file"
+        )

     merge_num = 4
     blip = False
-    if strat_pool.check_rpool('ants-blip-warp') and reg_tool == 'ants':
-        blip_node, blip_out = strat_pool.get_data('ants-blip-warp')
+    if strat_pool.check_rpool("ants-blip-warp") and reg_tool == "ants":
+        blip_node, blip_out = strat_pool.get_data("ants-blip-warp")
         merge_num = 5
         blip = True
-    elif strat_pool.check_rpool('fsl-blip-warp') and reg_tool == 'fsl':
-        blip_node, blip_out = strat_pool.get_data('fsl-blip-warp')
+    elif strat_pool.check_rpool("fsl-blip-warp") and reg_tool == "fsl":
+        blip_node, blip_out = strat_pool.get_data("fsl-blip-warp")
         merge_num = 5
         blip = True

-    collectxfm = pe.MapNode(util.Merge(merge_num),
-                            name=f'collectxfm_func_to_standard_{pipe_num}',
-                            iterfield=[f'in{merge_num}'])
+    collectxfm = pe.MapNode(
+        util.Merge(merge_num),
+        name=f"collectxfm_func_to_standard_{pipe_num}",
+        iterfield=[f"in{merge_num}"],
+    )

-    node, out = strat_pool.get_data('from-T1w_to-template_mode-image_xfm')
-    wf.connect(node, out, collectxfm, 'in1')
+    node, out = strat_pool.get_data("from-T1w_to-template_mode-image_xfm")
+    wf.connect(node, out, collectxfm, "in1")

-    wf.connect(bbr2itk, 'itk_transform',
-               collectxfm, 'in2')
+    wf.connect(bbr2itk, "itk_transform", collectxfm, "in2")

-    collectxfm.inputs.in3 = 'identity'
+    collectxfm.inputs.in3 = "identity"

     if blip:
-        wf.connect(blip_node, blip_out, collectxfm, 'in4')
+        wf.connect(blip_node, blip_out, collectxfm, "in4")

-    wf.connect(motionxfm2itk, 'itk_transform',
-               collectxfm, f'in{merge_num}')
+    wf.connect(motionxfm2itk, "itk_transform", collectxfm, f"in{merge_num}")

     applyxfm_func_to_standard = pe.MapNode(
         interface=ants.ApplyTransforms(),
-        name=f'applyxfm_func_to_standard_{pipe_num}',
-        iterfield=['input_image', 'transforms'])
+        name=f"applyxfm_func_to_standard_{pipe_num}",
+        iterfield=["input_image", "transforms"],
+    )
     applyxfm_func_to_standard.inputs.float = True
-    applyxfm_func_to_standard.inputs.interpolation = 'LanczosWindowedSinc'
+    applyxfm_func_to_standard.inputs.interpolation = "LanczosWindowedSinc"

     applyxfm_derivfunc_to_standard = pe.MapNode(
         interface=ants.ApplyTransforms(),
-        name=f'applyxfm_derivfunc_to_standard_{pipe_num}',
-        iterfield=['input_image', 'transforms'])
+        name=f"applyxfm_derivfunc_to_standard_{pipe_num}",
+        iterfield=["input_image", "transforms"],
+    )
     applyxfm_derivfunc_to_standard.inputs.float = True
-    applyxfm_derivfunc_to_standard.inputs.interpolation = 'LanczosWindowedSinc'
+    applyxfm_derivfunc_to_standard.inputs.interpolation = "LanczosWindowedSinc"
+
+    wf.connect(split_func, "out_files", applyxfm_func_to_standard, "input_image")
+    wf.connect(split_func, "out_files", applyxfm_derivfunc_to_standard, "input_image")

-    wf.connect(split_func, 'out_files',
-               applyxfm_func_to_standard, 'input_image')
-    wf.connect(split_func, 'out_files',
-               applyxfm_derivfunc_to_standard, 'input_image')
+    node, out = strat_pool.get_data("T1w-brain-template-funcreg")
+    wf.connect(node, out, applyxfm_func_to_standard, "reference_image")

-    node, out = strat_pool.get_data('T1w-brain-template-funcreg')
-    wf.connect(node, out, applyxfm_func_to_standard, 'reference_image')
-
-    node, out = strat_pool.get_data('T1w-brain-template-deriv')
-    wf.connect(node, out, applyxfm_derivfunc_to_standard, 'reference_image')
+    node, out = strat_pool.get_data("T1w-brain-template-deriv")
+    wf.connect(node, out, applyxfm_derivfunc_to_standard, "reference_image")

-    wf.connect(collectxfm, 'out', applyxfm_func_to_standard, 'transforms')
-    wf.connect(collectxfm, 'out', applyxfm_derivfunc_to_standard, 'transforms')
+    wf.connect(collectxfm, "out", applyxfm_func_to_standard, "transforms")
+    wf.connect(collectxfm, "out", applyxfm_derivfunc_to_standard, "transforms")

     ### Loop ends! ###
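+    # Note (editorial comment): collectxfm hands ApplyTransforms the complete
+    # per-volume stack (template<-T1w xfm, ITK-converted BBR affine, identity,
+    # optional blip warp, and that volume's motion affine), so each timepoint
+    # is resampled into template space with a single LanczosWindowedSinc
+    # interpolation. This is the "single step" that avoids compounding
+    # interpolation error across sequential resamplings.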
-    merge_func_to_standard = pe.Node(interface=fslMerge(),
-                                     name=f'merge_func_to_standard_{pipe_num}')
-    merge_func_to_standard.inputs.dimension = 't'
+    merge_func_to_standard = pe.Node(
+        interface=fslMerge(), name=f"merge_func_to_standard_{pipe_num}"
+    )
+    merge_func_to_standard.inputs.dimension = "t"

-    wf.connect(applyxfm_func_to_standard, 'output_image',
-               merge_func_to_standard, 'in_files')
+    wf.connect(
+        applyxfm_func_to_standard, "output_image", merge_func_to_standard, "in_files"
+    )

     merge_derivfunc_to_standard = pe.Node(
-        interface=fslMerge(), name=f'merge_derivfunc_to_standard_{pipe_num}')
-    merge_derivfunc_to_standard.inputs.dimension = 't'
+        interface=fslMerge(), name=f"merge_derivfunc_to_standard_{pipe_num}"
+    )
+    merge_derivfunc_to_standard.inputs.dimension = "t"

-    wf.connect(applyxfm_derivfunc_to_standard, 'output_image',
-               merge_derivfunc_to_standard, 'in_files')
+    wf.connect(
+        applyxfm_derivfunc_to_standard,
+        "output_image",
+        merge_derivfunc_to_standard,
+        "in_files",
+    )

     applyxfm_func_mask_to_standard = pe.Node(
         interface=ants.ApplyTransforms(),
-        name=f'applyxfm_func_mask_to_standard_{pipe_num}')
-    applyxfm_func_mask_to_standard.inputs.interpolation = 'MultiLabel'
+        name=f"applyxfm_func_mask_to_standard_{pipe_num}",
+    )
+    applyxfm_func_mask_to_standard.inputs.interpolation = "MultiLabel"

-    node, out = strat_pool.get_data('space-bold_desc-brain_mask')
-    wf.connect(node, out, applyxfm_func_mask_to_standard, 'input_image')
+    node, out = strat_pool.get_data("space-bold_desc-brain_mask")
+    wf.connect(node, out, applyxfm_func_mask_to_standard, "input_image")

-    node, out = strat_pool.get_data('T1w-brain-template-funcreg')
-    wf.connect(node, out, applyxfm_func_mask_to_standard, 'reference_image')
+    node, out = strat_pool.get_data("T1w-brain-template-funcreg")
+    wf.connect(node, out, applyxfm_func_mask_to_standard, "reference_image")

     collectxfm_mask = pe.Node(
-        util.Merge(2), name=f'collectxfm_func_mask_to_standard_{pipe_num}')
+        util.Merge(2), name=f"collectxfm_func_mask_to_standard_{pipe_num}"
+    )

-    node, out = strat_pool.get_data('from-T1w_to-template_mode-image_xfm')
-    wf.connect(node, out, collectxfm_mask, 'in1')
+    node, out = strat_pool.get_data("from-T1w_to-template_mode-image_xfm")
+    wf.connect(node, out, collectxfm_mask, "in1")

-    wf.connect(bbr2itk, 'itk_transform', collectxfm_mask, 'in2')
+    wf.connect(bbr2itk, "itk_transform", collectxfm_mask, "in2")

-    wf.connect(collectxfm_mask, 'out',
-               applyxfm_func_mask_to_standard, 'transforms')
+    wf.connect(collectxfm_mask, "out", applyxfm_func_mask_to_standard, "transforms")

     applyxfm_deriv_mask_to_standard = pe.Node(
         interface=ants.ApplyTransforms(),
-        name=f'applyxfm_deriv_mask_to_standard_{pipe_num}')
-    applyxfm_deriv_mask_to_standard.inputs.interpolation = 'MultiLabel'
+        name=f"applyxfm_deriv_mask_to_standard_{pipe_num}",
+    )
+    applyxfm_deriv_mask_to_standard.inputs.interpolation = "MultiLabel"

-    node, out = strat_pool.get_data('space-bold_desc-brain_mask')
-    wf.connect(node, out, applyxfm_deriv_mask_to_standard, 'input_image')
+    node, out = strat_pool.get_data("space-bold_desc-brain_mask")
+    wf.connect(node, out, applyxfm_deriv_mask_to_standard, "input_image")

-    node, out = strat_pool.get_data('T1w-brain-template-deriv')
-    wf.connect(node, out, applyxfm_deriv_mask_to_standard, 'reference_image')
+    node, out = strat_pool.get_data("T1w-brain-template-deriv")
+    wf.connect(node, out, applyxfm_deriv_mask_to_standard, "reference_image")

     collectxfm_deriv_mask = pe.Node(
-        util.Merge(2), 
name=f'collectxfm_deriv_mask_to_standard_{pipe_num}') + util.Merge(2), name=f"collectxfm_deriv_mask_to_standard_{pipe_num}" + ) - node, out = strat_pool.get_data('from-T1w_to-template_mode-image_xfm') - wf.connect(node, out, collectxfm_deriv_mask, 'in1') + node, out = strat_pool.get_data("from-T1w_to-template_mode-image_xfm") + wf.connect(node, out, collectxfm_deriv_mask, "in1") - wf.connect(bbr2itk, 'itk_transform', - collectxfm_deriv_mask, 'in2') + wf.connect(bbr2itk, "itk_transform", collectxfm_deriv_mask, "in2") - wf.connect(collectxfm_deriv_mask, 'out', - applyxfm_deriv_mask_to_standard, 'transforms') + wf.connect( + collectxfm_deriv_mask, "out", applyxfm_deriv_mask_to_standard, "transforms" + ) - apply_mask = pe.Node(interface=fsl.maths.ApplyMask(), - name=f'get_func_brain_to_standard_{pipe_num}') + apply_mask = pe.Node( + interface=fsl.maths.ApplyMask(), name=f"get_func_brain_to_standard_{pipe_num}" + ) - wf.connect(merge_func_to_standard, 'merged_file', - apply_mask, 'in_file') + wf.connect(merge_func_to_standard, "merged_file", apply_mask, "in_file") - wf.connect(applyxfm_func_mask_to_standard, 'output_image', - apply_mask, 'mask_file') + wf.connect(applyxfm_func_mask_to_standard, "output_image", apply_mask, "mask_file") outputs = { - 'space-template_desc-head_bold': (merge_func_to_standard, - 'merged_file'), - 'space-template_desc-brain_bold': (apply_mask, 'out_file'), - 'space-template_desc-preproc_bold': (apply_mask, 'out_file'), - 'space-template_desc-bold_mask': (applyxfm_func_mask_to_standard, - 'output_image'), - 'space-template_res-derivative_desc-preproc_bold': - (merge_derivfunc_to_standard, 'merged_file'), - 'space-template_res-derivative_desc-bold_mask': - (applyxfm_deriv_mask_to_standard, 'output_image') + "space-template_desc-head_bold": (merge_func_to_standard, "merged_file"), + "space-template_desc-brain_bold": (apply_mask, "out_file"), + "space-template_desc-preproc_bold": (apply_mask, "out_file"), + "space-template_desc-bold_mask": ( + applyxfm_func_mask_to_standard, + "output_image", + ), + "space-template_res-derivative_desc-preproc_bold": ( + merge_derivfunc_to_standard, + "merged_file", + ), + "space-template_res-derivative_desc-bold_mask": ( + applyxfm_deriv_mask_to_standard, + "output_image", + ), } return (wf, outputs) @@ -4620,18 +5049,24 @@ def single_step_resample_timeseries_to_T1template(wf, cfg, strat_pool, outputs={ "space-template_sbref": { "Description": "Single-volume sbref of the BOLD time-series " - "transformed to template space.", + "transformed to template space.", "Template": "T1w-brain-template-funcreg", } }, ) def warp_sbref_to_T1template(wf, cfg, strat_pool, pipe_num, opt=None): - xfm = 'from-bold_to-template_mode-image_xfm' + xfm = "from-bold_to-template_mode-image_xfm" wf, apply_xfm = warp_resource_to_template( - wf, cfg, strat_pool, pipe_num, 'sbref', xfm, - reference='T1w-brain-template-funcreg', time_series=False)[:2] - outputs = {'space-template_sbref': - (apply_xfm, 'outputspec.output_image')} + wf, + cfg, + strat_pool, + pipe_num, + "sbref", + xfm, + reference="T1w-brain-template-funcreg", + time_series=False, + )[:2] + outputs = {"space-template_sbref": (apply_xfm, "outputspec.output_image")} return _warp_return(wf, apply_xfm, outputs) @@ -4659,15 +5094,22 @@ def warp_sbref_to_T1template(wf, cfg, strat_pool, pipe_num, opt=None): "T1w-brain-template-funcreg", ], outputs={ - "space-template_desc-bold_mask": { - "Template": "T1w-brain-template-funcreg"}}) + "space-template_desc-bold_mask": {"Template": "T1w-brain-template-funcreg"} 
+ }, +) def warp_bold_mask_to_T1template(wf, cfg, strat_pool, pipe_num, opt=None): - xfm = 'from-bold_to-template_mode-image_xfm' + xfm = "from-bold_to-template_mode-image_xfm" wf, apply_xfm = warp_resource_to_template( - wf, cfg, strat_pool, pipe_num, 'space-bold_desc-brain_mask', xfm, - reference='T1w-brain-template-funcreg', time_series=False)[:2] - outputs = {'space-template_desc-bold_mask': - (apply_xfm, 'outputspec.output_image')} + wf, + cfg, + strat_pool, + pipe_num, + "space-bold_desc-brain_mask", + xfm, + reference="T1w-brain-template-funcreg", + time_series=False, + )[:2] + outputs = {"space-template_desc-bold_mask": (apply_xfm, "outputspec.output_image")} return _warp_return(wf, apply_xfm, outputs) @@ -4701,15 +5143,26 @@ def warp_bold_mask_to_T1template(wf, cfg, strat_pool, pipe_num, opt=None): }, ) def warp_deriv_mask_to_T1template(wf, cfg, strat_pool, pipe_num, opt=None): - '''Transform the BOLD mask to template space and to the resolution set for + """Transform the BOLD mask to template space and to the resolution set for the derivative outputs. - ''' - xfm = 'from-bold_to-template_mode-image_xfm' + """ + xfm = "from-bold_to-template_mode-image_xfm" wf, apply_xfm = warp_resource_to_template( - wf, cfg, strat_pool, pipe_num, 'space-bold_desc-brain_mask', xfm, - reference='T1w-brain-template-deriv', time_series=False)[:2] - outputs = {'space-template_res-derivative_desc-bold_mask': - (apply_xfm, 'outputspec.output_image')} + wf, + cfg, + strat_pool, + pipe_num, + "space-bold_desc-brain_mask", + xfm, + reference="T1w-brain-template-deriv", + time_series=False, + )[:2] + outputs = { + "space-template_res-derivative_desc-bold_mask": ( + apply_xfm, + "outputspec.output_image", + ) + } return _warp_return(wf, apply_xfm, outputs) @@ -4728,12 +5181,11 @@ def warp_deriv_mask_to_T1template(wf, cfg, strat_pool, pipe_num, opt=None): outputs={"space-template_desc-preproc_bold": {"Template": "EPI-template"}}, ) def warp_timeseries_to_EPItemplate(wf, cfg, strat_pool, pipe_num, opt=None): - xfm = 'from-bold_to-EPItemplate_mode-image_xfm' + xfm = "from-bold_to-EPItemplate_mode-image_xfm" wf, apply_xfm, resource = warp_resource_to_template( - wf, cfg, strat_pool, pipe_num, 'desc-preproc_bold', xfm, - time_series=True) - outputs = {f'space-template_{resource}': - (apply_xfm, 'outputspec.output_image')} + wf, cfg, strat_pool, pipe_num, "desc-preproc_bold", xfm, time_series=True + ) + outputs = {f"space-template_{resource}": (apply_xfm, "outputspec.output_image")} return _warp_return(wf, apply_xfm, outputs) @@ -4752,12 +5204,11 @@ def warp_timeseries_to_EPItemplate(wf, cfg, strat_pool, pipe_num, opt=None): outputs={"space-template_desc-mean_bold": {"Template": "EPI-template"}}, ) def warp_bold_mean_to_EPItemplate(wf, cfg, strat_pool, pipe_num, opt=None): - xfm = 'from-bold_to-EPItemplate_mode-image_xfm' + xfm = "from-bold_to-EPItemplate_mode-image_xfm" wf, apply_xfm = warp_resource_to_template( - wf, cfg, strat_pool, pipe_num, 'desc-mean_bold', xfm, - time_series=False)[:2] - outputs = {'space-template_desc-mean_bold': - (apply_xfm, 'outputspec.output_image')} + wf, cfg, strat_pool, pipe_num, "desc-mean_bold", xfm, time_series=False + )[:2] + outputs = {"space-template_desc-mean_bold": (apply_xfm, "outputspec.output_image")} return _warp_return(wf, apply_xfm, outputs) @@ -4770,19 +5221,23 @@ def warp_bold_mean_to_EPItemplate(wf, cfg, strat_pool, pipe_num, opt=None): ], switch=["run_EPI"], inputs=[ - ("space-bold_desc-brain_mask", - "from-bold_to-EPItemplate_mode-image_xfm"), + 
("space-bold_desc-brain_mask", "from-bold_to-EPItemplate_mode-image_xfm"), "EPI-template", ], outputs={"space-template_desc-bold_mask": {"Template": "EPI-template"}}, ) def warp_bold_mask_to_EPItemplate(wf, cfg, strat_pool, pipe_num, opt=None): - xfm = 'from-bold_to-EPItemplate_mode-image_xfm' + xfm = "from-bold_to-EPItemplate_mode-image_xfm" wf, apply_xfm = warp_resource_to_template( - wf, cfg, strat_pool, pipe_num, 'space-bold_desc-brain_mask', xfm, - time_series=False)[:2] - outputs = {'space-template_desc-bold_mask': - (apply_xfm, 'outputspec.output_image')} + wf, + cfg, + strat_pool, + pipe_num, + "space-bold_desc-brain_mask", + xfm, + time_series=False, + )[:2] + outputs = {"space-template_desc-bold_mask": (apply_xfm, "outputspec.output_image")} return _warp_return(wf, apply_xfm, outputs) @@ -4795,25 +5250,33 @@ def warp_bold_mask_to_EPItemplate(wf, cfg, strat_pool, pipe_num, opt=None): ], switch=["run_EPI"], inputs=[ - ("space-bold_desc-brain_mask", - "from-bold_to-EPItemplate_mode-image_xfm"), + ("space-bold_desc-brain_mask", "from-bold_to-EPItemplate_mode-image_xfm"), "EPI-template", ], outputs={ - "space-template_res-derivative_desc-bold_mask": { - "Template": "EPI-template"} + "space-template_res-derivative_desc-bold_mask": {"Template": "EPI-template"} }, ) def warp_deriv_mask_to_EPItemplate(wf, cfg, strat_pool, pipe_num, opt=None): - '''Transform the BOLD mask to template space and to the resolution set for + """Transform the BOLD mask to template space and to the resolution set for the derivative outputs. - ''' - xfm = 'from-bold_to-EPItemplate_mode-image_xfm' + """ + xfm = "from-bold_to-EPItemplate_mode-image_xfm" wf, apply_xfm = warp_resource_to_template( - wf, cfg, strat_pool, pipe_num, 'space-bold_desc-brain_mask', xfm, - time_series=False)[:2] - outputs = {'space-template_res-derivative_desc-bold_mask': - (apply_xfm, 'outputspec.output_image')} + wf, + cfg, + strat_pool, + pipe_num, + "space-bold_desc-brain_mask", + xfm, + time_series=False, + )[:2] + outputs = { + "space-template_res-derivative_desc-bold_mask": ( + apply_xfm, + "outputspec.output_image", + ) + } return _warp_return(wf, apply_xfm, outputs) @@ -4836,9 +5299,14 @@ def warp_deriv_mask_to_EPItemplate(wf, cfg, strat_pool, pipe_num, opt=None): }, ) def warp_tissuemask_to_T1template(wf, cfg, strat_pool, pipe_num, opt=None): - return warp_tissuemask_to_template(wf, cfg, strat_pool, pipe_num, - xfm='from-T1w_to-template_mode-image_' - 'xfm', template_space='T1') + return warp_tissuemask_to_template( + wf, + cfg, + strat_pool, + pipe_num, + xfm="from-T1w_to-template_mode-image_" "xfm", + template_space="T1", + ) @nodeblock( @@ -4865,15 +5333,18 @@ def warp_tissuemask_to_T1template(wf, cfg, strat_pool, pipe_num, opt=None): }, ) def warp_tissuemask_to_EPItemplate(wf, cfg, strat_pool, pipe_num, opt=None): - return warp_tissuemask_to_template(wf, cfg, strat_pool, pipe_num, - xfm='from-bold_to-EPItemplate_' - 'mode-image_xfm', - template_space='EPI') + return warp_tissuemask_to_template( + wf, + cfg, + strat_pool, + pipe_num, + xfm="from-bold_to-EPItemplate_" "mode-image_xfm", + template_space="EPI", + ) -def warp_tissuemask_to_template(wf, cfg, strat_pool, pipe_num, xfm, - template_space): - '''Function to apply transforms to tissue masks +def warp_tissuemask_to_template(wf, cfg, strat_pool, pipe_num, xfm, template_space): + """Function to apply transforms to tissue masks Parameters ---------- @@ -4891,27 +5362,42 @@ def warp_tissuemask_to_template(wf, cfg, strat_pool, pipe_num, xfm, wf : 
+    tissue_types = ["CSF", "WM", "GM"]
     apply_xfm = {}
     for tissue in tissue_types:
         wf, apply_xfm[tissue] = warp_resource_to_template(
-            wf, cfg, strat_pool, pipe_num, f'label-{tissue}_mask', xfm,
-            time_series=False)[:2]
-    if template_space == 'T1':
-        template_space = ''
-    outputs = {f'space-{template_space}template_label-{tissue}_mask': (
-        apply_xfm[tissue], 'outputspec.output_image') for
-        tissue in tissue_types}
+            wf,
+            cfg,
+            strat_pool,
+            pipe_num,
+            f"label-{tissue}_mask",
+            xfm,
+            time_series=False,
+        )[:2]
+    if template_space == "T1":
+        template_space = ""
+    outputs = {
+        f"space-{template_space}template_label-{tissue}_mask": (
+            apply_xfm[tissue],
+            "outputspec.output_image",
+        )
+        for tissue in tissue_types
+    }

     return _warp_return(wf, apply_xfm, outputs)


-def warp_resource_to_template(wf: pe.Workflow, cfg, strat_pool, pipe_num: int,
-                              input_resource: LIST_OR_STR, xfm: str,
-                              reference: Optional[str] = None,
-                              time_series: Optional[bool] = False
-                              ) -> TUPLE[pe.Workflow, pe.Workflow, str]:
-    '''Function to warp a resource into a template space
+def warp_resource_to_template(
+    wf: pe.Workflow,
+    cfg,
+    strat_pool,
+    pipe_num: int,
+    input_resource: LIST_OR_STR,
+    xfm: str,
+    reference: Optional[str] = None,
+    time_series: Optional[bool] = False,
+) -> TUPLE[pe.Workflow, pe.Workflow, str]:
+    """Function to warp a resource into a template space

     Parameters
     ----------
@@ -4946,53 +5432,58 @@ def warp_resource_to_template(wf: pe.Workflow, cfg, strat_pool, pipe_num: int,
     resource : str
         key of input resource in strat_pool
-    '''
+    """
     # determine space we're warping to
-    template_space = xfm.split('_to-', 1)[1].split('template')[0]
-    if template_space == '':
-        template_space = 'T1w'
+    template_space = xfm.split("_to-", 1)[1].split("template")[0]
+    if template_space == "":
+        template_space = "T1w"
     # determine tool used for registration
     xfm_prov = strat_pool.get_cpac_provenance(xfm)
     reg_tool = check_prov_for_regtool(xfm_prov)
     # set 'resource'
     if strat_pool.check_rpool(input_resource):
-        resource, input_resource = strat_pool.get_data(input_resource,
-                                                       report_fetched=True)
+        resource, input_resource = strat_pool.get_data(
+            input_resource, report_fetched=True
+        )
     else:
         return wf, None, input_resource
     # set 'reference' if not passed and determine subworkflow name
     if reference is None:
         subwf_input_name = input_resource
-        reference = f'{template_space}-template'
+        reference = f"{template_space}-template"
     else:
-        subwf_input_name = '-'.join([
-            reference.split('-')[-1].split('_')[-1],
-            input_resource.split('-')[-1].split('_')[-1]])
+        subwf_input_name = "-".join(
+            [
+                reference.split("-")[-1].split("_")[-1],
+                input_resource.split("-")[-1].split("_")[-1],
+            ]
+        )
     # set up 'apply_transform' subworkflow
-    apply_xfm = apply_transform(f'warp_{subwf_input_name}_to_'
-                                f'{template_space}template_{pipe_num}',
-                                reg_tool, time_series=time_series,
-                                num_cpus=cfg.pipeline_setup['system_config'][
-                                    'max_cores_per_participant'],
-                                num_ants_cores=cfg.pipeline_setup[
-                                    'system_config']['num_ants_threads'])
+    apply_xfm = apply_transform(
+        f"warp_{subwf_input_name}_to_{template_space}template_{pipe_num}",
+        reg_tool,
+        time_series=time_series,
+        num_cpus=cfg.pipeline_setup["system_config"]["max_cores_per_participant"],
+        num_ants_cores=cfg.pipeline_setup["system_config"]["num_ants_threads"],
+    )
     # set appropriate 'interpolation' input based on registration tool
-    if reg_tool == 'ants':
-        
apply_xfm.inputs.inputspec.interpolation = 'NearestNeighbor' - elif reg_tool == 'fsl': - apply_xfm.inputs.inputspec.interpolation = 'nn' + if reg_tool == "ants": + apply_xfm.inputs.inputspec.interpolation = "NearestNeighbor" + elif reg_tool == "fsl": + apply_xfm.inputs.inputspec.interpolation = "nn" # connect nodes to subworkflow node, out = resource - wf.connect(node, out, apply_xfm, 'inputspec.input_image') + wf.connect(node, out, apply_xfm, "inputspec.input_image") node, out = strat_pool.get_data(reference) - wf.connect(node, out, apply_xfm, 'inputspec.reference') + wf.connect(node, out, apply_xfm, "inputspec.reference") node, out = strat_pool.get_data(xfm) - wf.connect(node, out, apply_xfm, 'inputspec.transform') + wf.connect(node, out, apply_xfm, "inputspec.transform") return wf, apply_xfm, input_resource -def _warp_return(wf: pe.Workflow, apply_xfm: Optional[pe.Workflow], - outputs: dict) -> TUPLE[pe.Workflow, dict]: +def _warp_return( + wf: pe.Workflow, apply_xfm: Optional[pe.Workflow], outputs: dict +) -> TUPLE[pe.Workflow, dict]: """Check if we have a transform to apply; if not, don't add the outputs""" if apply_xfm is None: return wf, {} diff --git a/CPAC/registration/tests/mocks.py b/CPAC/registration/tests/mocks.py index b0a1000499..0bdf2f678b 100644 --- a/CPAC/registration/tests/mocks.py +++ b/CPAC/registration/tests/mocks.py @@ -1,111 +1,159 @@ import os + from nipype.interfaces import utility as util + from CPAC.pipeline import nipype_pipeline_engine as pe from CPAC.utils.configuration import Configuration from CPAC.utils.datasource import resolve_resolution from CPAC.utils.interfaces.function import Function from CPAC.utils.strategy import Strategy + def file_node(path, file_node_num=0): input_node = pe.Node( - util.IdentityInterface(fields=['file']), name='file_node_{0}'.format(file_node_num) + util.IdentityInterface(fields=["file"]), name=f"file_node_{file_node_num}" ) input_node.inputs.file = path - return input_node, 'file' + return input_node, "file" + -def configuration_strategy_mock( method = 'FSL' ): +def configuration_strategy_mock(method="FSL"): fsldir = os.environ.get("FSLDIR") # mock the config dictionary - c = Configuration({ - "num_ants_threads": 4, - "workingDirectory": "/scratch/pipeline_tests", - "crashLogDirectory": "/scratch", - "outputDirectory": "/output/output/pipeline_analysis_nuisance/sub-M10978008_ses-NFB3", - "resolution_for_func_preproc": "3mm", - "resolution_for_func_derivative": "3mm", - "template_for_resample": f"{fsldir}/data/standard/" - "MNI152_T1_1mm_brain.nii.gz", - "template_brain_only_for_func": f"{fsldir}/data/standard/" - r"MNI152_T1_${func_resolution}_" - "brain.nii.gz", - "template_skull_for_func": f"{fsldir}/data/standard/" - r"MNI152_T1_${func_resolution}.nii.gz", - "identityMatrix": f"{fsldir}/etc/flirtsch/ident.mat", - "funcRegFSLinterpolation": "sinc", - "funcRegANTSinterpolation": "LanczosWindowedSinc" - }) - - if method == 'ANTS': - c.update('regOption', 'ANTS') + c = Configuration( + { + "num_ants_threads": 4, + "workingDirectory": "/scratch/pipeline_tests", + "crashLogDirectory": "/scratch", + "outputDirectory": "/output/output/pipeline_analysis_nuisance/sub-M10978008_ses-NFB3", + "resolution_for_func_preproc": "3mm", + "resolution_for_func_derivative": "3mm", + "template_for_resample": f"{fsldir}/data/standard/" + "MNI152_T1_1mm_brain.nii.gz", + "template_brain_only_for_func": f"{fsldir}/data/standard/" + r"MNI152_T1_${func_resolution}_" + "brain.nii.gz", + "template_skull_for_func": f"{fsldir}/data/standard/" + 
r"MNI152_T1_${func_resolution}.nii.gz", + "identityMatrix": f"{fsldir}/etc/flirtsch/ident.mat", + "funcRegFSLinterpolation": "sinc", + "funcRegANTSinterpolation": "LanczosWindowedSinc", + } + ) + + if method == "ANTS": + c.update("regOption", "ANTS") else: - c.update('regOption', 'FSL') + c.update("regOption", "FSL") # mock the strategy strat = Strategy() resource_dict = { - "mean_functional": os.path.join(c.outputDirectory, - "mean_functional/sub-M10978008_ses-NFB3_task-test_bold_calc_tshift_resample_volreg_calc_tstat.nii.gz"), - "motion_correct": os.path.join(c.outputDirectory, - "motion_correct/_scan_test/sub-M10978008_ses-NFB3_task-test_bold_calc_tshift_resample_volreg.nii.gz"), - "anatomical_brain": os.path.join(c.outputDirectory, - "anatomical_brain/sub-M10978008_ses-NFB3_acq-ao_brain_resample.nii.gz"), - "ants_initial_xfm": os.path.join(c.outputDirectory, - "ants_initial_xfm/transform0DerivedInitialMovingTranslation.mat"), - "ants_affine_xfm": os.path.join(c.outputDirectory, - "ants_affine_xfm/transform2Affine.mat"), - "ants_rigid_xfm": os.path.join(c.outputDirectory, - "ants_rigid_xfm/transform1Rigid.mat"), - "anatomical_to_mni_linear_xfm": os.path.join(c.outputDirectory, - "anatomical_to_mni_linear_xfm/sub-M10978008_ses-NFB3_T1w_resample_calc_flirt.mat"), - "functional_to_anat_linear_xfm": os.path.join(c.outputDirectory, - "functional_to_anat_linear_xfm/_scan_test/sub-M10978008_ses-NFB3_task-test_bold_calc_tshift_resample_volreg_calc_tstat_flirt.mat"), - 'ants_symm_warp_field': os.path.join(c.outputDirectory, - "anatomical_to_symmetric_mni_nonlinear_xfm/transform3Warp.nii.gz"), - 'ants_symm_affine_xfm': os.path.join(c.outputDirectory, - "ants_symmetric_affine_xfm/transform2Affine.mat"), - 'ants_symm_rigid_xfm': os.path.join(c.outputDirectory, - "ants_symmetric_rigid_xfm/transform1Rigid.mat"), - 'ants_symm_initial_xfm': os.path.join(c.outputDirectory, - "ants_symmetric_initial_xfm/transform0DerivedInitialMovingTranslation.mat"), - "dr_tempreg_maps_files": [os.path.join('/scratch', 'resting_preproc_sub-M10978008_ses-NFB3_cpac105', 'temporal_dual_regression_0/_scan_test/_selector_CSF-2mmE-M_aC-WM-2mmE-DPC5_G-M_M-SDB_P-2/_spatial_map_PNAS_Smith09_rsn10_spatial_map_file_..cpac_templates..PNAS_Smith09_rsn10.nii.gz/split_raw_volumes/temp_reg_map_000{0}.nii.gz'.format(n)) for n in range(10)] + "mean_functional": os.path.join( + c.outputDirectory, + "mean_functional/sub-M10978008_ses-NFB3_task-test_bold_calc_tshift_resample_volreg_calc_tstat.nii.gz", + ), + "motion_correct": os.path.join( + c.outputDirectory, + "motion_correct/_scan_test/sub-M10978008_ses-NFB3_task-test_bold_calc_tshift_resample_volreg.nii.gz", + ), + "anatomical_brain": os.path.join( + c.outputDirectory, + "anatomical_brain/sub-M10978008_ses-NFB3_acq-ao_brain_resample.nii.gz", + ), + "ants_initial_xfm": os.path.join( + c.outputDirectory, + "ants_initial_xfm/transform0DerivedInitialMovingTranslation.mat", + ), + "ants_affine_xfm": os.path.join( + c.outputDirectory, "ants_affine_xfm/transform2Affine.mat" + ), + "ants_rigid_xfm": os.path.join( + c.outputDirectory, "ants_rigid_xfm/transform1Rigid.mat" + ), + "anatomical_to_mni_linear_xfm": os.path.join( + c.outputDirectory, + "anatomical_to_mni_linear_xfm/sub-M10978008_ses-NFB3_T1w_resample_calc_flirt.mat", + ), + "functional_to_anat_linear_xfm": os.path.join( + c.outputDirectory, + "functional_to_anat_linear_xfm/_scan_test/sub-M10978008_ses-NFB3_task-test_bold_calc_tshift_resample_volreg_calc_tstat_flirt.mat", + ), + "ants_symm_warp_field": os.path.join( + c.outputDirectory, 
+ "anatomical_to_symmetric_mni_nonlinear_xfm/transform3Warp.nii.gz", + ), + "ants_symm_affine_xfm": os.path.join( + c.outputDirectory, "ants_symmetric_affine_xfm/transform2Affine.mat" + ), + "ants_symm_rigid_xfm": os.path.join( + c.outputDirectory, "ants_symmetric_rigid_xfm/transform1Rigid.mat" + ), + "ants_symm_initial_xfm": os.path.join( + c.outputDirectory, + "ants_symmetric_initial_xfm/transform0DerivedInitialMovingTranslation.mat", + ), + "dr_tempreg_maps_files": [ + os.path.join( + "/scratch", + "resting_preproc_sub-M10978008_ses-NFB3_cpac105", + f"temporal_dual_regression_0/_scan_test/_selector_CSF-2mmE-M_aC-WM-2mmE-DPC5_G-M_M-SDB_P-2/_spatial_map_PNAS_Smith09_rsn10_spatial_map_file_..cpac_templates..PNAS_Smith09_rsn10.nii.gz/split_raw_volumes/temp_reg_map_000{n}.nii.gz", + ) + for n in range(10) + ], } - if method == 'ANTS': - resource_dict["anatomical_to_mni_nonlinear_xfm"] = os.path.join(c.outputDirectory, - "anatomical_to_mni_nonlinear_xfm/transform3Warp.nii.gz") + if method == "ANTS": + resource_dict["anatomical_to_mni_nonlinear_xfm"] = os.path.join( + c.outputDirectory, "anatomical_to_mni_nonlinear_xfm/transform3Warp.nii.gz" + ) else: - resource_dict["anatomical_to_mni_nonlinear_xfm"] = os.path.join(c.outputDirectory, - "anatomical_to_mni_nonlinear_xfm/sub-M10978008_ses-NFB3_T1w_resample_fieldwarp.nii.gz") - + resource_dict["anatomical_to_mni_nonlinear_xfm"] = os.path.join( + c.outputDirectory, + "anatomical_to_mni_nonlinear_xfm/sub-M10978008_ses-NFB3_T1w_resample_fieldwarp.nii.gz", + ) + file_node_num = 0 for resource, filepath in resource_dict.items(): - strat.update_resource_pool({ - resource: file_node(filepath, file_node_num) - }) - strat.append_name(resource+'_0') + strat.update_resource_pool({resource: file_node(filepath, file_node_num)}) + strat.append_name(resource + "_0") file_node_num += 1 templates_for_resampling = [ - (c.resolution_for_func_preproc, c.template_brain_only_for_func, - 'template_brain_for_func_preproc', 'resolution_for_func_preproc'), - (c.resolution_for_func_preproc, c.template_brain_only_for_func, - 'template_skull_for_func_preproc', 'resolution_for_func_preproc') + ( + c.resolution_for_func_preproc, + c.template_brain_only_for_func, + "template_brain_for_func_preproc", + "resolution_for_func_preproc", + ), + ( + c.resolution_for_func_preproc, + c.template_brain_only_for_func, + "template_skull_for_func_preproc", + "resolution_for_func_preproc", + ), ] for resolution, template, template_name, tag in templates_for_resampling: - resampled_template = pe.Node(Function(input_names = ['resolution', 'template', 'template_name', 'tag'], - output_names = ['resampled_template'], - function = resolve_resolution, - as_module = True), - name = 'resampled_' + template_name) + resampled_template = pe.Node( + Function( + input_names=["resolution", "template", "template_name", "tag"], + output_names=["resampled_template"], + function=resolve_resolution, + as_module=True, + ), + name="resampled_" + template_name, + ) resampled_template.inputs.resolution = resolution resampled_template.inputs.template = template resampled_template.inputs.template_name = template_name resampled_template.inputs.tag = tag - strat.update_resource_pool({template_name: (resampled_template, 'resampled_template')}) - strat.append_name('resampled_template_0') + strat.update_resource_pool( + {template_name: (resampled_template, "resampled_template")} + ) + strat.append_name("resampled_template_0") return c, strat diff --git a/CPAC/registration/tests/test_registration.py 
b/CPAC/registration/tests/test_registration.py index 430e71ef7b..4b8edea0cd 100755 --- a/CPAC/registration/tests/test_registration.py +++ b/CPAC/registration/tests/test_registration.py @@ -1,13 +1,13 @@ import pytest -@pytest.mark.skip(reason='needs refactoring') +@pytest.mark.skip(reason="needs refactoring") def test_nonlinear_register(): - from ..registration import create_nonlinear_register - + from nipype.interfaces import fsl + from CPAC.pipeline import nipype_pipeline_engine as pe - import nipype.interfaces.fsl as fsl - + from ..registration import create_nonlinear_register + ## necessary inputs ## -input_brain ## -input_skull @@ -15,146 +15,144 @@ def test_nonlinear_register(): ## -reference_skull ## -fnirt_config ## -fnirt_warp_res - + ## input_brain - anat_bet_file = '/home/data/Projects/nuisance_reliability_paper/working_dir_CPAC_order/resting_preproc/anatpreproc/_session_id_NYU_TRT_session1_subject_id_sub05676/anat_skullstrip/mprage_anonymized_RPI_3dT.nii.gz' - + anat_bet_file = "/home/data/Projects/nuisance_reliability_paper/working_dir_CPAC_order/resting_preproc/anatpreproc/_session_id_NYU_TRT_session1_subject_id_sub05676/anat_skullstrip/mprage_anonymized_RPI_3dT.nii.gz" + ## input_skull - + ## reference_brain - mni_file = '/usr/share/fsl/4.1/data/standard/MNI152_T1_3mm_brain.nii.gz' - + mni_file = "/usr/share/fsl/4.1/data/standard/MNI152_T1_3mm_brain.nii.gz" + ## reference_skull - + ## fnirt_config - fnirt_config = 'T1_2_MNI152_3mm' - + fnirt_config = "T1_2_MNI152_3mm" + ## fnirt_warp_res fnirt_warp_res = None - - #?? what is this for?: - func_file = '/home/data/Projects/nuisance_reliability_paper/working_dir_CPAC_order/resting_preproc/nuisance_preproc/_session_id_NYU_TRT_session1_subject_id_sub05676/_csf_threshold_0.4/_gm_threshold_0.2/_wm_threshold_0.66/_run_scrubbing_False/_nc_5/_selector_6.7/regress_nuisance/mapflow/_regress_nuisance0/residual.nii.gz' - - - mni_workflow = pe.Workflow(name='mni_workflow') - - linear_reg = pe.Node(interface=fsl.FLIRT(), - name='linear_reg_0') - linear_reg.inputs.cost = 'corratio' + + # ?? 
what is this for?: + func_file = "/home/data/Projects/nuisance_reliability_paper/working_dir_CPAC_order/resting_preproc/nuisance_preproc/_session_id_NYU_TRT_session1_subject_id_sub05676/_csf_threshold_0.4/_gm_threshold_0.2/_wm_threshold_0.66/_run_scrubbing_False/_nc_5/_selector_6.7/regress_nuisance/mapflow/_regress_nuisance0/residual.nii.gz" + + mni_workflow = pe.Workflow(name="mni_workflow") + + linear_reg = pe.Node(interface=fsl.FLIRT(), name="linear_reg_0") + linear_reg.inputs.cost = "corratio" linear_reg.inputs.dof = 6 - linear_reg.inputs.interp = 'nearestneighbour' - + linear_reg.inputs.interp = "nearestneighbour" + linear_reg.inputs.in_file = func_file linear_reg.inputs.reference = anat_bet_file - - #T1 to MNI Node + + # T1 to MNI Node c = create_nonlinear_register() c.inputs.inputspec.input = anat_bet_file - c.inputs.inputspec.reference = '/usr/share/fsl/4.1/data/standard/MNI152_T1_3mm_brain.nii.gz' - c.inputs.inputspec.fnirt_config = 'T1_2_MNI152_3mm' - - #EPI to MNI warp Node - mni_warp = pe.Node(interface=fsl.ApplyWarp(), - name='mni_warp') - mni_warp.inputs.ref_file = '/usr/share/fsl/4.1/data/standard/MNI152_T1_3mm_brain.nii.gz' + c.inputs.inputspec.reference = ( + "/usr/share/fsl/4.1/data/standard/MNI152_T1_3mm_brain.nii.gz" + ) + c.inputs.inputspec.fnirt_config = "T1_2_MNI152_3mm" + + # EPI to MNI warp Node + mni_warp = pe.Node(interface=fsl.ApplyWarp(), name="mni_warp") + mni_warp.inputs.ref_file = ( + "/usr/share/fsl/4.1/data/standard/MNI152_T1_3mm_brain.nii.gz" + ) mni_warp.inputs.in_file = func_file - mni_workflow.connect(c, 'outputspec.nonlinear_xfm', - mni_warp, 'field_file') - mni_workflow.connect(linear_reg, 'out_matrix_file', - mni_warp, 'premat') - - mni_workflow.base_dir = './' - mni_workflow.run() + mni_workflow.connect(c, "outputspec.nonlinear_xfm", mni_warp, "field_file") + mni_workflow.connect(linear_reg, "out_matrix_file", mni_warp, "premat") + + mni_workflow.base_dir = "./" + mni_workflow.run() -@pytest.mark.skip(reason='needs refactoring') +@pytest.mark.skip(reason="needs refactoring") def test_registration(): - from ..registration import create_nonlinear_register - from ..registration import create_register_func_to_mni - from CPAC.pipeline import nipype_pipeline_engine as pe - import nipype.interfaces.fsl as fsl - - func_file = '/home/data/Projects/nuisance_reliability_paper/working_dir_CPAC_order/resting_preproc/nuisance_preproc/_session_id_NYU_TRT_session1_subject_id_sub05676/_csf_threshold_0.4/_gm_threshold_0.2/_wm_threshold_0.66/_run_scrubbing_False/_nc_5/_selector_6.7/regress_nuisance/mapflow/_regress_nuisance0/residual.nii.gz' - anat_skull_file = '/home/data/Projects/nuisance_reliability_paper/working_dir_CPAC_order/resting_preproc/anatpreproc/_session_id_NYU_TRT_session1_subject_id_sub05676/anat_reorient/mprage_anonymized_RPI.nii.gz' - anat_bet_file = '/home/data/Projects/nuisance_reliability_paper/working_dir_CPAC_order/resting_preproc/anatpreproc/_session_id_NYU_TRT_session1_subject_id_sub05676/anat_skullstrip/mprage_anonymized_RPI_3dT.nii.gz' - mni_brain_file = '/usr/share/fsl/4.1/data/standard/MNI152_T1_3mm_brain.nii.gz' - mni_skull_file = '/usr/share/fsl/4.1/data/standard/MNI152_T1_3mm.nii.gz' - - - mni_workflow = pe.Workflow(name='mni_workflow') - + from ..registration import create_nonlinear_register, create_register_func_to_mni + + func_file = 
"/home/data/Projects/nuisance_reliability_paper/working_dir_CPAC_order/resting_preproc/nuisance_preproc/_session_id_NYU_TRT_session1_subject_id_sub05676/_csf_threshold_0.4/_gm_threshold_0.2/_wm_threshold_0.66/_run_scrubbing_False/_nc_5/_selector_6.7/regress_nuisance/mapflow/_regress_nuisance0/residual.nii.gz" + anat_skull_file = "/home/data/Projects/nuisance_reliability_paper/working_dir_CPAC_order/resting_preproc/anatpreproc/_session_id_NYU_TRT_session1_subject_id_sub05676/anat_reorient/mprage_anonymized_RPI.nii.gz" + anat_bet_file = "/home/data/Projects/nuisance_reliability_paper/working_dir_CPAC_order/resting_preproc/anatpreproc/_session_id_NYU_TRT_session1_subject_id_sub05676/anat_skullstrip/mprage_anonymized_RPI_3dT.nii.gz" + mni_brain_file = "/usr/share/fsl/4.1/data/standard/MNI152_T1_3mm_brain.nii.gz" + mni_skull_file = "/usr/share/fsl/4.1/data/standard/MNI152_T1_3mm.nii.gz" + + mni_workflow = pe.Workflow(name="mni_workflow") + nr = create_nonlinear_register() nr.inputs.inputspec.input_brain = anat_bet_file nr.inputs.inputspec.input_skull = anat_skull_file nr.inputs.inputspec.reference_brain = mni_brain_file nr.inputs.inputspec.reference_skull = mni_skull_file - nr.inputs.inputspec.fnirt_config = '/usr/share/fsl/4.1/etc/flirtsch/T1_2_MNI152_3mm.cnf' + nr.inputs.inputspec.fnirt_config = ( + "/usr/share/fsl/4.1/etc/flirtsch/T1_2_MNI152_3mm.cnf" + ) func2mni = create_register_func_to_mni() func2mni.inputs.inputspec.func = func_file func2mni.inputs.inputspec.mni = mni_brain_file func2mni.inputs.inputspec.anat = anat_bet_file - - mni_workflow.connect(nr, 'outputspec.nonlinear_xfm', - func2mni, 'inputspec.anat_to_mni_xfm') - mni_workflow.base_dir = './mni_05676_3' + + mni_workflow.connect( + nr, "outputspec.nonlinear_xfm", func2mni, "inputspec.anat_to_mni_xfm" + ) + mni_workflow.base_dir = "./mni_05676_3" mni_workflow.run() -@pytest.mark.skip(reason='needs refactoring') +@pytest.mark.skip(reason="needs refactoring") def test_registration_lesion(): import os - from CPAC.pipeline import nipype_pipeline_engine as pe - from ..registration import create_wf_calculate_ants_warp + from CPAC.anat_preproc.anat_preproc import create_anat_preproc from CPAC.anat_preproc.lesion_preproc import create_lesion_preproc + from CPAC.pipeline import nipype_pipeline_engine as pe + from ..registration import create_wf_calculate_ants_warp # Skull stripped anat image - anat_file = '/bids_dataset/sub-0027228/ses-1/anat/sub-0027228_ses-1_run-1_T1w.nii.gz' - lesion_file = '/bids_dataset/sub-0027228/ses-1/anat/sub-0027228_ses-1_run-1_T1w_lesion-mask.nii.gz' - mni_brain_file = (f'{os.environ.get("FSLDIR")}/data/standard/' - 'MNI152_T1_3mm_brain.nii.gz') + anat_file = ( + "/bids_dataset/sub-0027228/ses-1/anat/sub-0027228_ses-1_run-1_T1w.nii.gz" + ) + lesion_file = "/bids_dataset/sub-0027228/ses-1/anat/sub-0027228_ses-1_run-1_T1w_lesion-mask.nii.gz" + mni_brain_file = ( + f'{os.environ.get("FSLDIR")}/data/standard/' 'MNI152_T1_3mm_brain.nii.gz' + ) if not os.path.exists(anat_file): - raise IOError(anat_file + ' not found') + raise IOError(anat_file + " not found") if not os.path.exists(lesion_file): - raise IOError(lesion_file + ' not found') + raise IOError(lesion_file + " not found") if not os.path.exists(mni_brain_file): - raise IOError(mni_brain_file + ' not found') + raise IOError(mni_brain_file + " not found") - wf = pe.Workflow(name='test_reg_lesion') + wf = pe.Workflow(name="test_reg_lesion") - anat_preproc = create_anat_preproc(method='mask', - already_skullstripped=True, - wf_name='anat_preproc') + anat_preproc 
= create_anat_preproc( + method="mask", already_skullstripped=True, wf_name="anat_preproc" + ) anat_preproc.inputs.inputspec.anat = anat_file - lesion_preproc = create_lesion_preproc( - wf_name='lesion_preproc' - ) + lesion_preproc = create_lesion_preproc(wf_name="lesion_preproc") lesion_preproc.inputs.inputspec.lesion = lesion_file - ants_reg_anat_mni = \ - create_wf_calculate_ants_warp( - 'anat_mni_ants_register', - 0, - num_threads=4 - ) + ants_reg_anat_mni = create_wf_calculate_ants_warp( + "anat_mni_ants_register", 0, num_threads=4 + ) # pass the reference file ants_reg_anat_mni.inputs.inputspec.reference_brain = mni_brain_file wf.connect( - anat_preproc, 'outputspec.reorient', - ants_reg_anat_mni, 'inputspec.moving_brain' + anat_preproc, "outputspec.reorient", ants_reg_anat_mni, "inputspec.moving_brain" ) wf.connect( - lesion_preproc, 'outputspec.reorient', - ants_reg_anat_mni, 'inputspec.fixed_image_mask' + lesion_preproc, + "outputspec.reorient", + ants_reg_anat_mni, + "inputspec.fixed_image_mask", ) ants_reg_anat_mni.inputs.inputspec.set( @@ -162,30 +160,22 @@ def test_registration_lesion(): use_histogram_matching=True, winsorize_lower_quantile=0.01, winsorize_upper_quantile=0.99, - metric=['MI', 'MI', 'CC'], + metric=["MI", "MI", "CC"], metric_weight=[1, 1, 1], radius_or_number_of_bins=[32, 32, 4], - sampling_strategy=['Regular', 'Regular', None], + sampling_strategy=["Regular", "Regular", None], sampling_percentage=[0.25, 0.25, None], number_of_iterations=[ [1000, 500, 250, 100], [1000, 500, 250, 100], - [100, 100, 70, 20] + [100, 100, 70, 20], ], convergence_threshold=[1e-8, 1e-8, 1e-9], convergence_window_size=[10, 10, 15], - transforms=['Rigid', 'Affine', 'SyN'], + transforms=["Rigid", "Affine", "SyN"], transform_parameters=[[0.1], [0.1], [0.1, 3, 0]], - shrink_factors=[ - [8, 4, 2, 1], - [8, 4, 2, 1], - [6, 4, 2, 1] - ], - smoothing_sigmas=[ - [3, 2, 1, 0], - [3, 2, 1, 0], - [3, 2, 1, 0] - ] + shrink_factors=[[8, 4, 2, 1], [8, 4, 2, 1], [6, 4, 2, 1]], + smoothing_sigmas=[[3, 2, 1, 0], [3, 2, 1, 0], [3, 2, 1, 0]], ) wf.run() diff --git a/CPAC/resources/configs/pipeline_config_blank.yml b/CPAC/resources/configs/pipeline_config_blank.yml index 67e1277c79..ea7b4db5b8 100644 --- a/CPAC/resources/configs/pipeline_config_blank.yml +++ b/CPAC/resources/configs/pipeline_config_blank.yml @@ -352,7 +352,8 @@ anatomical_preproc: monkey: Off FSL-BET: - # Swich "On" to crop out neck regions before generating the mask (default: Off). + + # Switch "On" to crop out neck regions before generating the mask (default: Off). Robustfov: Off # Set the threshold value controlling the brain vs non-brain voxels, default is 0.5 diff --git a/CPAC/resources/configs/pipeline_config_default.yml b/CPAC/resources/configs/pipeline_config_default.yml index cb0bca639c..d3008f0c09 100644 --- a/CPAC/resources/configs/pipeline_config_default.yml +++ b/CPAC/resources/configs/pipeline_config_default.yml @@ -200,7 +200,7 @@ surface_analysis: # Will run Freesurfer for surface-based analysis. Will output traditional Freesurfer derivatives. # If you wish to employ Freesurfer outputs for brain masking or tissue segmentation in the voxel-based pipeline, # select those 'Freesurfer-' labeled options further below in anatomical_preproc. 
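For orientation, a minimal sketch of how switches like these surface_analysis options are read at runtime, using the dict-style Configuration access pattern that appears elsewhere in this patch (e.g. cfg.functional_preproc["truncation"]["start_tr"]); the key names mirror the YAML keys in this hunk, and Preconfiguration is the loader already used in CPAC/resources/tests/test_templates.py. This is a hedged illustration, not part of the patch:

# Hedged sketch -- key names mirror the surface_analysis YAML section.
from CPAC.utils.configuration import Preconfiguration

cfg = Preconfiguration("default")
if cfg.surface_analysis["freesurfer"]["run_reconall"]:
    pass  # FreeSurfer recon-all stages would be wired in here
if cfg.surface_analysis["post_freesurfer"]["run"]:
    pass  # ABCD-HCP post-FreeSurfer stages would follow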
- freesurfer: + freesurfer: run_reconall: Off @@ -212,7 +212,7 @@ surface_analysis: # Run ABCD-HCP post FreeSurfer and fMRISurface pipeline - post_freesurfer: + post_freesurfer: run: Off @@ -280,7 +280,7 @@ anatomical_preproc: run_t2: Off # Non-local means filtering via ANTs DenoiseImage - non_local_means_filtering: + non_local_means_filtering: # this is a fork option run: [Off] @@ -298,12 +298,12 @@ anatomical_preproc: shrink_factor: 2 # Bias field correction based on square root of T1w * T2w - t1t2_bias_field_correction: + t1t2_bias_field_correction: + + run: Off - run: Off - BiasFieldSmoothingSigma: 5 - + acpc_alignment: run: Off @@ -316,17 +316,17 @@ anatomical_preproc: # Default: 150mm for human data. brain_size: 150 - # Choose a tool to crop the FOV in ACPC alignment. - # Using FSL's robustfov or flirt command. - # Default: robustfov for human data, flirt for monkey data. + # Choose a tool to crop the FOV in ACPC alignment. + # Using FSL's robustfov or flirt command. + # Default: robustfov for human data, flirt for monkey data. FOV_crop: robustfov - + # ACPC Target # options: 'brain' or 'whole-head' # note: 'brain' requires T1w_brain_ACPC_template below to be populated acpc_target: 'whole-head' - # Run ACPC alignment on brain mask + # Run ACPC alignment on brain mask # If the brain mask is in native space, turn it on # If the brain mask is ACPC aligned, turn it off align_brain_mask: Off @@ -338,7 +338,7 @@ anatomical_preproc: T2w_brain_ACPC_template: None brain_extraction: - + run: On # using: ['3dSkullStrip', 'BET', 'UNet', 'niworkflows-ants', 'FreeSurfer-ABCD', 'FreeSurfer-BET-Tight', 'FreeSurfer-BET-Loose', 'FreeSurfer-Brainmask'] @@ -412,9 +412,9 @@ anatomical_preproc: monkey: False FSL-BET: - # Switch "On" to crop out neck regions before generating the mask (default: Off). + # Switch "On" to crop out neck regions before generating the mask (default: Off). Robustfov : Off - + # Set the threshold value controlling the brain vs non-brain voxels, default is 0.5 frac: 0.5 @@ -715,21 +715,21 @@ registration_workflows: fnirt_config: T1_2_MNI152_2mm # The resolution to which anatomical images should be transformed during registration. - # This is the resolution at which processed anatomical files will be output. + # This is the resolution at which processed anatomical files will be output. # specifically for monkey pipeline ref_resolution: 2mm # Reference mask for FSL registration. ref_mask: $FSLDIR/data/standard/MNI152_T1_${resolution_for_anat}_brain_mask_dil.nii.gz - + # Template to be used during registration. - # It is for monkey pipeline specifically. + # It is for monkey pipeline specifically. FNIRT_T1w_brain_template: None # Template to be used during registration. - # It is for monkey pipeline specifically. + # It is for monkey pipeline specifically. FNIRT_T1w_template: None - + # Interpolation method for writing out transformed anatomical images. 
# Possible values: trilinear, sinc, spline interpolation: sinc @@ -922,10 +922,10 @@ registration_workflows: # these options modify the application (to the functional data), not the calculation, of the # T1-to-template and EPI-to-template transforms calculated earlier during registration - + # apply the functional-to-template (T1 template) registration transform to the functional data run: On - + # apply the functional-to-template (EPI template) registration transform to the functional data run_EPI: Off @@ -946,7 +946,7 @@ registration_workflows: # thus, a higher resolution may not result in a large increase in RAM needs as above func_derivative_outputs: 3mm - target_template: + target_template: # choose which template space to transform derivatives towards # using: ['T1_template', 'EPI_template'] # this is a fork point @@ -1014,11 +1014,11 @@ functional_preproc: run: On - update_header: + update_header: # Convert raw data from LPI to RPI run: On - + truncation: # First timepoint to include in analysis. @@ -1063,10 +1063,10 @@ functional_preproc: tzero: None motion_estimates_and_correction: - + run: On - motion_estimates: + motion_estimates: # calculate motion statistics BEFORE slice-timing correction calculate_motion_first: Off @@ -1206,48 +1206,48 @@ functional_preproc: # Set the threshold value for the skull-stripping of the magnitude file. Depending on the data, a tighter extraction may be necessary in order to prevent noisy voxels from interfering with preparing the field map. # The default value is 0.6. fmap_skullstrip_AFNI_threshold: 0.6 - + Blip-FSL-TOPUP: - + # (approximate) resolution (in mm) of warp basis for the different sub-sampling levels, default 10 warpres: 10 - + # sub-sampling scheme, default 1 subsamp: 1 - + # FWHM (in mm) of gaussian smoothing kernel, default 8 fwhm: 8 - + # Max # of non-linear iterations, default 5 miter: 5 - + # Weight of regularisation, default depending on --ssqlambda and --regmod switches. See user documentation. lambda: 1 - + # If set (=1), lambda is weighted by current ssq, default 1 ssqlambda: 1 - + # Model for regularisation of warp-field [membrane_energy bending_energy], default bending_energy regmod: bending_energy - + # Estimate movements if set, default 1 (true) estmov: 1 - + # Minimisation method 0=Levenberg-Marquardt, 1=Scaled Conjugate Gradient, default 0 (LM) minmet: 0 - + # Order of spline, 2->Quadratic spline, 3->Cubic spline. Default=3 splineorder: 3 - + # Precision for representing Hessian, double or float. Default double numprec: double - + # Image interpolation model, linear or spline. Default spline interp: spline - + # If set (=1), the images are individually scaled to a common mean, default 0 (false) scale: 0 - + # If set (=1), the calculations are done in a different grid, default 1 (true) regrid: 1 @@ -1272,7 +1272,7 @@ functional_preproc: functional_mean_boolean: Off # Set an intensity threshold to improve skull stripping performance of FSL BET on rodent scans. - functional_mean_thr: + functional_mean_thr: run: Off threshold_value: 98 @@ -1340,7 +1340,7 @@ functional_preproc: # Normalize functional image run: On - + coreg_prep: # Generate sbref diff --git a/CPAC/resources/tests/test_templates.py b/CPAC/resources/tests/test_templates.py index 40a598cfab..e73a4d7bc0 100644 --- a/CPAC/resources/tests/test_templates.py +++ b/CPAC/resources/tests/test_templates.py @@ -15,28 +15,37 @@ # You should have received a copy of the GNU Lesser General Public # License along with C-PAC. If not, see . 
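One note before the test changes below: the Blip-FSL-TOPUP keys documented above correspond one-to-one to command-line options of FSL's topup tool. A hedged sketch of that correspondence, with values taken from the YAML defaults above (the flag spellings are an assumption based on FSL's topup usage text, not something this patch defines):

# Hypothetical illustration only -- flag names assumed from FSL topup's usage.
topup_options = {
    "warpres": 10, "subsamp": 1, "fwhm": 8, "miter": 5,
    "lambda": 1, "ssqlambda": 1, "regmod": "bending_energy",
    "estmov": 1, "minmet": 0, "splineorder": 3,
    "numprec": "double", "interp": "spline", "scale": 0, "regrid": 1,
}
# e.g. ["--warpres=10", "--subsamp=1", ...]
topup_flags = [f"--{name}={value}" for name, value in topup_options.items()]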
"""Tests for packaged templates""" + import os + import pytest + from CPAC.pipeline import ALL_PIPELINE_CONFIGS from CPAC.pipeline.engine import ingress_pipeconfig_paths, ResourcePool from CPAC.utils.configuration import Preconfiguration from CPAC.utils.datasource import get_highest_local_res -@pytest.mark.parametrize('pipeline', ALL_PIPELINE_CONFIGS) +@pytest.mark.parametrize("pipeline", ALL_PIPELINE_CONFIGS) def test_packaged_path_exists(pipeline): """ Check that all local templates are included in image at at least one resolution """ - rpool = ingress_pipeconfig_paths(Preconfiguration(pipeline), - ResourcePool(), 'pytest') + rpool = ingress_pipeconfig_paths( + Preconfiguration(pipeline), ResourcePool(), "pytest" + ) for resource in rpool.rpool.values(): - node = list(resource.values())[0].get('data')[0] - if hasattr(node.inputs, 'template' - ) and not node.inputs.template.startswith('s3:'): - if not pipeline == 'rodent' and node.inputs.template.startswith( - '/template/study_based'): - assert (os.path.exists(node.inputs.template) or - get_highest_local_res(node.inputs.template, - node.inputs.resolution).exists()) + node = list(resource.values())[0].get("data")[0] + if hasattr(node.inputs, "template") and not node.inputs.template.startswith( + "s3:" + ): + if not pipeline == "rodent" and node.inputs.template.startswith( + "/template/study_based" + ): + assert ( + os.path.exists(node.inputs.template) + or get_highest_local_res( + node.inputs.template, node.inputs.resolution + ).exists() + ) diff --git a/CPAC/utils/datasource.py b/CPAC/utils/datasource.py index 0a45ae37b7..439d09ab61 100644 --- a/CPAC/utils/datasource.py +++ b/CPAC/utils/datasource.py @@ -16,21 +16,22 @@ # License along with C-PAC. If not, see . import csv import json -import re from pathlib import Path +import re from typing import Union + from nipype import logging from nipype.interfaces import utility as util + from CPAC.pipeline import nipype_pipeline_engine as pe -from CPAC.resources.templates.lookup_table import format_identifier, \ - lookup_identifier +from CPAC.resources.templates.lookup_table import format_identifier, lookup_identifier from CPAC.utils import function from CPAC.utils.bids_utils import bids_remove_entity from CPAC.utils.interfaces.function import Function from CPAC.utils.typing import TUPLE from CPAC.utils.utils import get_scan_params -logger = logging.getLogger('nipype.workflow') +logger = logging.getLogger("nipype.workflow") def bidsier_prefix(unique_id): @@ -56,13 +57,13 @@ def bidsier_prefix(unique_id): >>> bidsier_prefix('01_ses-1') 'sub-01_ses-1' """ - keys = ['sub', 'ses'] - components = unique_id.split('_') + keys = ["sub", "ses"] + components = unique_id.split("_") for i, component in enumerate(components): if i < len(keys): if not component.startswith(keys[i]): - components[i] = '-'.join([keys[i], component]) - return '_'.join(components) + components[i] = "-".join([keys[i], component]) + return "_".join(components) def get_rest(scan, rest_dict, resource="scan"): @@ -99,35 +100,36 @@ def select_model_files(model, ftest, model_name): """ Method to select model files """ - - import os import glob + import os - files = glob.glob(os.path.join(model, '*')) + files = glob.glob(os.path.join(model, "*")) if len(files) == 0: raise Exception("No files found inside directory %s" % model) - fts_file = '' + fts_file = "" for filename in files: - if (model_name + '.mat') in filename: + if (model_name + ".mat") in filename: mat_file = filename - elif (model_name + '.grp') in filename: + elif (model_name 
+ ".grp") in filename: grp_file = filename - elif ((model_name + '.fts') in filename) and ftest: + elif ((model_name + ".fts") in filename) and ftest: fts_file = filename - elif (model_name + '.con') in filename: + elif (model_name + ".con") in filename: con_file = filename - if ftest == True and fts_file == '': - errmsg = "\n[!] CPAC says: You have f-tests included in your group " \ - "analysis model '%s', but no .fts files were found in the " \ - "output folder specified for group analysis: %s.\n\nThe " \ - ".fts file is automatically generated by CPAC, and if you " \ - "are seeing this error, it is because something went wrong " \ - "with the generation of this file, or it has been moved." \ - "\n\n" % (model_name, model) + if ftest == True and fts_file == "": + errmsg = ( + "\n[!] CPAC says: You have f-tests included in your group " + "analysis model '%s', but no .fts files were found in the " + "output folder specified for group analysis: %s.\n\nThe " + ".fts file is automatically generated by CPAC, and if you " + "are seeing this error, it is because something went wrong " + "with the generation of this file, or it has been moved." + "\n\n" % (model_name, model) + ) raise Exception(errmsg) @@ -136,223 +138,252 @@ def select_model_files(model, ftest, model_name): def check_func_scan(func_scan_dct, scan): """Run some checks on the functional timeseries-related files for a given - series/scan name or label.""" - + series/scan name or label. + """ scan_resources = func_scan_dct[scan] try: scan_resources.keys() except AttributeError: - err = "\n[!] The data configuration file you provided is " \ - "missing a level under the 'func:' key. CPAC versions " \ - "1.2 and later use data configurations with an " \ - "additional level of nesting.\n\nExample\nfunc:\n " \ - "rest01:\n scan: /path/to/rest01_func.nii.gz\n" \ - " scan parameters: /path/to/scan_params.json\n\n" \ - "See the User Guide for more information.\n\n" + err = ( + "\n[!] The data configuration file you provided is " + "missing a level under the 'func:' key. CPAC versions " + "1.2 and later use data configurations with an " + "additional level of nesting.\n\nExample\nfunc:\n " + "rest01:\n scan: /path/to/rest01_func.nii.gz\n" + " scan parameters: /path/to/scan_params.json\n\n" + "See the User Guide for more information.\n\n" + ) raise Exception(err) # actual 4D time series file if "scan" not in scan_resources.keys(): - err = "\n\n[!] The {0} scan is missing its actual time-series " \ - "scan file, which should be a filepath labeled with the " \ - "'scan' key.\n\n".format(scan) + err = ( + f"\n\n[!] The {scan} scan is missing its actual time-series " + "scan file, which should be a filepath labeled with the " + "'scan' key.\n\n" + ) raise Exception(err) # Nipype restriction (may have changed) - if '.' in scan or '+' in scan or '*' in scan: - raise Exception('\n\n[!] Scan names cannot contain any special ' - 'characters (., +, *, etc.). Please update this ' - 'and try again.\n\nScan: {0}' - '\n\n'.format(scan)) + if "." in scan or "+" in scan or "*" in scan: + raise Exception( + "\n\n[!] Scan names cannot contain any special " + "characters (., +, *, etc.). 
Please update this " + f"and try again.\n\nScan: {scan}" + "\n\n" + ) -def create_func_datasource(rest_dict, rpool, wf_name='func_datasource'): +def create_func_datasource(rest_dict, rpool, wf_name="func_datasource"): """Return the functional timeseries-related file paths for each series/scan, from the dictionary of functional files described in the data configuration (sublist) YAML file. Scan input (from inputnode) is an iterable. """ - from CPAC.pipeline import nipype_pipeline_engine as pe import nipype.interfaces.utility as util + from CPAC.pipeline import nipype_pipeline_engine as pe + wf = pe.Workflow(name=wf_name) - inputnode = pe.Node(util.IdentityInterface( - fields=['subject', 'scan', 'creds_path', 'dl_dir'], - mandatory_inputs=True), - name='inputnode') + inputnode = pe.Node( + util.IdentityInterface( + fields=["subject", "scan", "creds_path", "dl_dir"], mandatory_inputs=True + ), + name="inputnode", + ) - outputnode = pe.Node(util.IdentityInterface(fields=['subject', 'rest', - 'scan', 'scan_params', - 'phase_diff', - 'magnitude']), - name='outputspec') + outputnode = pe.Node( + util.IdentityInterface( + fields=["subject", "rest", "scan", "scan_params", "phase_diff", "magnitude"] + ), + name="outputspec", + ) # have this here for now because of the big change in the data - # configuration format + # configuration format # (Not necessary with ingress - format does not comply) - if not rpool.check_rpool('derivatives-dir'): - check_scan = pe.Node(function.Function(input_names=['func_scan_dct', - 'scan'], - output_names=[], - function=check_func_scan, - as_module=True), - name='check_func_scan') + if not rpool.check_rpool("derivatives-dir"): + check_scan = pe.Node( + function.Function( + input_names=["func_scan_dct", "scan"], + output_names=[], + function=check_func_scan, + as_module=True, + ), + name="check_func_scan", + ) check_scan.inputs.func_scan_dct = rest_dict - wf.connect(inputnode, 'scan', check_scan, 'scan') - + wf.connect(inputnode, "scan", check_scan, "scan") # get the functional scan itself - selectrest = pe.Node(function.Function(input_names=['scan', - 'rest_dict', - 'resource'], - output_names=['file_path'], - function=get_rest, - as_module=True), - name='selectrest') + selectrest = pe.Node( + function.Function( + input_names=["scan", "rest_dict", "resource"], + output_names=["file_path"], + function=get_rest, + as_module=True, + ), + name="selectrest", + ) selectrest.inputs.rest_dict = rest_dict selectrest.inputs.resource = "scan" - wf.connect(inputnode, 'scan', selectrest, 'scan') + wf.connect(inputnode, "scan", selectrest, "scan") # check to see if it's on an Amazon AWS S3 bucket, and download it, if it # is - otherwise, just return the local file path - check_s3_node = pe.Node(function.Function(input_names=['file_path', - 'creds_path', - 'dl_dir', - 'img_type'], - output_names=['local_path'], - function=check_for_s3, - as_module=True), - name='check_for_s3') - - wf.connect(selectrest, 'file_path', check_s3_node, 'file_path') - wf.connect(inputnode, 'creds_path', check_s3_node, 'creds_path') - wf.connect(inputnode, 'dl_dir', check_s3_node, 'dl_dir') - check_s3_node.inputs.img_type = 'func' - - wf.connect(inputnode, 'subject', outputnode, 'subject') - wf.connect(check_s3_node, 'local_path', outputnode, 'rest') - wf.connect(inputnode, 'scan', outputnode, 'scan') + check_s3_node = pe.Node( + function.Function( + input_names=["file_path", "creds_path", "dl_dir", "img_type"], + output_names=["local_path"], + function=check_for_s3, + as_module=True, + ), + 
name="check_for_s3", + ) + + wf.connect(selectrest, "file_path", check_s3_node, "file_path") + wf.connect(inputnode, "creds_path", check_s3_node, "creds_path") + wf.connect(inputnode, "dl_dir", check_s3_node, "dl_dir") + check_s3_node.inputs.img_type = "func" + + wf.connect(inputnode, "subject", outputnode, "subject") + wf.connect(check_s3_node, "local_path", outputnode, "rest") + wf.connect(inputnode, "scan", outputnode, "scan") # scan parameters CSV - select_scan_params = pe.Node(function.Function(input_names=['scan', - 'rest_dict', - 'resource'], - output_names=['file_path'], - function=get_rest, - as_module=True), - name='select_scan_params') + select_scan_params = pe.Node( + function.Function( + input_names=["scan", "rest_dict", "resource"], + output_names=["file_path"], + function=get_rest, + as_module=True, + ), + name="select_scan_params", + ) select_scan_params.inputs.rest_dict = rest_dict select_scan_params.inputs.resource = "scan_parameters" - wf.connect(inputnode, 'scan', select_scan_params, 'scan') + wf.connect(inputnode, "scan", select_scan_params, "scan") # if the scan parameters file is on AWS S3, download it - s3_scan_params = pe.Node(function.Function(input_names=['file_path', - 'creds_path', - 'dl_dir', - 'img_type'], - output_names=['local_path'], - function=check_for_s3, - as_module=True), - name='s3_scan_params') - - wf.connect(select_scan_params, 'file_path', s3_scan_params, 'file_path') - wf.connect(inputnode, 'creds_path', s3_scan_params, 'creds_path') - wf.connect(inputnode, 'dl_dir', s3_scan_params, 'dl_dir') - wf.connect(s3_scan_params, 'local_path', outputnode, 'scan_params') + s3_scan_params = pe.Node( + function.Function( + input_names=["file_path", "creds_path", "dl_dir", "img_type"], + output_names=["local_path"], + function=check_for_s3, + as_module=True, + ), + name="s3_scan_params", + ) + + wf.connect(select_scan_params, "file_path", s3_scan_params, "file_path") + wf.connect(inputnode, "creds_path", s3_scan_params, "creds_path") + wf.connect(inputnode, "dl_dir", s3_scan_params, "dl_dir") + wf.connect(s3_scan_params, "local_path", outputnode, "scan_params") return wf -def create_fmap_datasource(fmap_dct, wf_name='fmap_datasource'): +def create_fmap_datasource(fmap_dct, wf_name="fmap_datasource"): """Return the field map files, from the dictionary of functional files described in the data configuration (sublist) YAML file. 
""" + import nipype.interfaces.utility as util from CPAC.pipeline import nipype_pipeline_engine as pe - import nipype.interfaces.utility as util wf = pe.Workflow(name=wf_name) - inputnode = pe.Node(util.IdentityInterface( - fields=['subject', 'scan', 'creds_path', 'dl_dir'], - mandatory_inputs=True), - name='inputnode') - - outputnode = pe.Node(util.IdentityInterface(fields=['subject', 'rest', - 'scan', 'scan_params', - 'phase_diff', - 'magnitude']), - name='outputspec') - - selectrest = pe.Node(function.Function(input_names=['scan', - 'rest_dict', - 'resource'], - output_names=['file_path'], - function=get_rest, - as_module=True), - name='selectrest') + inputnode = pe.Node( + util.IdentityInterface( + fields=["subject", "scan", "creds_path", "dl_dir"], mandatory_inputs=True + ), + name="inputnode", + ) + + outputnode = pe.Node( + util.IdentityInterface( + fields=["subject", "rest", "scan", "scan_params", "phase_diff", "magnitude"] + ), + name="outputspec", + ) + + selectrest = pe.Node( + function.Function( + input_names=["scan", "rest_dict", "resource"], + output_names=["file_path"], + function=get_rest, + as_module=True, + ), + name="selectrest", + ) selectrest.inputs.rest_dict = fmap_dct selectrest.inputs.resource = "scan" - wf.connect(inputnode, 'scan', selectrest, 'scan') + wf.connect(inputnode, "scan", selectrest, "scan") # check to see if it's on an Amazon AWS S3 bucket, and download it, if it # is - otherwise, just return the local file path - check_s3_node = pe.Node(function.Function(input_names=['file_path', - 'creds_path', - 'dl_dir', - 'img_type'], - output_names=['local_path'], - function=check_for_s3, - as_module=True), - name='check_for_s3') - - wf.connect(selectrest, 'file_path', check_s3_node, 'file_path') - wf.connect(inputnode, 'creds_path', check_s3_node, 'creds_path') - wf.connect(inputnode, 'dl_dir', check_s3_node, 'dl_dir') - check_s3_node.inputs.img_type = 'other' - - wf.connect(inputnode, 'subject', outputnode, 'subject') - wf.connect(check_s3_node, 'local_path', outputnode, 'rest') - wf.connect(inputnode, 'scan', outputnode, 'scan') + check_s3_node = pe.Node( + function.Function( + input_names=["file_path", "creds_path", "dl_dir", "img_type"], + output_names=["local_path"], + function=check_for_s3, + as_module=True, + ), + name="check_for_s3", + ) + + wf.connect(selectrest, "file_path", check_s3_node, "file_path") + wf.connect(inputnode, "creds_path", check_s3_node, "creds_path") + wf.connect(inputnode, "dl_dir", check_s3_node, "dl_dir") + check_s3_node.inputs.img_type = "other" + + wf.connect(inputnode, "subject", outputnode, "subject") + wf.connect(check_s3_node, "local_path", outputnode, "rest") + wf.connect(inputnode, "scan", outputnode, "scan") # scan parameters CSV - select_scan_params = pe.Node(function.Function(input_names=['scan', - 'rest_dict', - 'resource'], - output_names=['file_path'], - function=get_rest, - as_module=True), - name='select_scan_params') + select_scan_params = pe.Node( + function.Function( + input_names=["scan", "rest_dict", "resource"], + output_names=["file_path"], + function=get_rest, + as_module=True, + ), + name="select_scan_params", + ) select_scan_params.inputs.rest_dict = fmap_dct select_scan_params.inputs.resource = "scan_parameters" - wf.connect(inputnode, 'scan', select_scan_params, 'scan') + wf.connect(inputnode, "scan", select_scan_params, "scan") # if the scan parameters file is on AWS S3, download it - s3_scan_params = pe.Node(function.Function(input_names=['file_path', - 'creds_path', - 'dl_dir', - 'img_type'], - 
output_names=['local_path'], - function=check_for_s3, - as_module=True), - name='s3_scan_params') - - wf.connect(select_scan_params, 'file_path', s3_scan_params, 'file_path') - wf.connect(inputnode, 'creds_path', s3_scan_params, 'creds_path') - wf.connect(inputnode, 'dl_dir', s3_scan_params, 'dl_dir') - wf.connect(s3_scan_params, 'local_path', outputnode, 'scan_params') + s3_scan_params = pe.Node( + function.Function( + input_names=["file_path", "creds_path", "dl_dir", "img_type"], + output_names=["local_path"], + function=check_for_s3, + as_module=True, + ), + name="s3_scan_params", + ) + + wf.connect(select_scan_params, "file_path", s3_scan_params, "file_path") + wf.connect(inputnode, "creds_path", s3_scan_params, "creds_path") + wf.connect(inputnode, "dl_dir", s3_scan_params, "dl_dir") + wf.connect(s3_scan_params, "local_path", outputnode, "scan_params") return wf def get_fmap_phasediff_metadata(data_config_scan_params): - if (not isinstance(data_config_scan_params, dict) and - ".json" in data_config_scan_params): - with open(data_config_scan_params, 'r', encoding='utf-8') as _f: + if ( + not isinstance(data_config_scan_params, dict) + and ".json" in data_config_scan_params + ): + with open(data_config_scan_params, "r", encoding="utf-8") as _f: data_config_scan_params = json.load(_f) echo_time = None @@ -360,20 +391,30 @@ def get_fmap_phasediff_metadata(data_config_scan_params): echo_time_two = None if "EchoTime" in data_config_scan_params: echo_time = data_config_scan_params.get("EchoTime") - elif "EchoTime1" in data_config_scan_params and "EchoTime2" \ in data_config_scan_params: + elif ( + "EchoTime1" in data_config_scan_params + and "EchoTime2" in data_config_scan_params + ): echo_time_one = data_config_scan_params.get("EchoTime1") echo_time_two = data_config_scan_params.get("EchoTime2") dwell_time = data_config_scan_params.get("DwellTime") pe_direction = data_config_scan_params.get("PhaseEncodingDirection") total_readout = data_config_scan_params.get("TotalReadoutTime") - return (dwell_time, pe_direction, total_readout, echo_time, - echo_time_one, echo_time_two) + return ( + dwell_time, + pe_direction, + total_readout, + echo_time, + echo_time_one, + echo_time_two, + ) -@Function.sig_imports(['from CPAC.utils.typing import TUPLE']) -def calc_delta_te_and_asym_ratio(effective_echo_spacing: float, - echo_times: list) -> TUPLE[float, float]: + +@Function.sig_imports(["from CPAC.utils.typing import TUPLE"]) +def calc_delta_te_and_asym_ratio( + effective_echo_spacing: float, echo_times: list +) -> TUPLE[float, float]: """Calculate ``deltaTE`` and ``ees_asym_ratio`` from given metadata Parameters ---------- effective_echo_spacing : float echo_times : list Returns ------- deltaTE : float ees_asym_ratio : float """ if not isinstance(effective_echo_spacing, float): - raise LookupError('C-PAC could not find `EffectiveEchoSpacing` in ' - 'either fmap or func sidecar JSON, but that field ' - 'is required for PhaseDiff distortion correction.') + raise LookupError( + "C-PAC could not find `EffectiveEchoSpacing` in " + "either fmap or func sidecar JSON, but that field " + "is required for PhaseDiff distortion correction." 
+ ) # convert into milliseconds if necessary # these values will/should never be more than 10ms @@ -401,7 +444,7 @@ def calc_delta_te_and_asym_ratio(effective_echo_spacing: float, echo_times[1] = echo_times[1] * 1000 deltaTE = abs(echo_times[0] - echo_times[1]) - ees_asym_ratio = (effective_echo_spacing / deltaTE) + ees_asym_ratio = effective_echo_spacing / deltaTE return deltaTE, ees_asym_ratio @@ -410,14 +453,21 @@ def gather_echo_times(echotime_1, echotime_2=None, echotime_3=None, echotime_4=N echotime_list = list(filter(lambda item: item is not None, echotime_list)) echotime_list = list(set(echotime_list)) if len(echotime_list) != 2: - raise Exception("\n[!] Something went wrong with the field map echo " - "times - there should be two distinct values.\n\n" - f"Echo Times:\n{echotime_list}\n") + raise Exception( + "\n[!] Something went wrong with the field map echo " + "times - there should be two distinct values.\n\n" + f"Echo Times:\n{echotime_list}\n" + ) return echotime_list -def match_epi_fmaps(bold_pedir, epi_fmap_one, epi_fmap_params_one, - epi_fmap_two=None, epi_fmap_params_two=None): +def match_epi_fmaps( + bold_pedir, + epi_fmap_one, + epi_fmap_params_one, + epi_fmap_two=None, + epi_fmap_params_two=None, +): """Parse the field map files in the data configuration and determine which ones have the same and opposite phase-encoding directions as the BOLD scan in the current pipeline. @@ -438,7 +488,6 @@ def match_epi_fmaps(bold_pedir, epi_fmap_one, epi_fmap_params_one, 2. Check whether there are one or two EPI's in the field map data. 3. Grab the one or two EPI field maps. """ - fmap_dct = {epi_fmap_one: epi_fmap_params_one} if epi_fmap_two and epi_fmap_params_two: fmap_dct[epi_fmap_two] = epi_fmap_params_two @@ -449,7 +498,7 @@ def match_epi_fmaps(bold_pedir, epi_fmap_one, epi_fmap_params_one, for epi_scan in fmap_dct.keys(): scan_params = fmap_dct[epi_scan] if not isinstance(scan_params, dict) and ".json" in scan_params: - with open(scan_params, 'r') as f: + with open(scan_params, "r") as f: scan_params = json.load(f) if "PhaseEncodingDirection" in scan_params: epi_pedir = scan_params["PhaseEncodingDirection"] @@ -461,12 +510,20 @@ def match_epi_fmaps(bold_pedir, epi_fmap_one, epi_fmap_params_one, return (opposite_pe_epi, same_pe_epi) -def ingress_func_metadata(wf, cfg, rpool, sub_dict, subject_id, - input_creds_path, unique_id=None, num_strat=None): - name_suffix = '' +def ingress_func_metadata( + wf, + cfg, + rpool, + sub_dict, + subject_id, + input_creds_path, + unique_id=None, + num_strat=None, +): + name_suffix = "" for suffix_part in (unique_id, num_strat): if suffix_part is not None: - name_suffix += f'_{suffix_part}' + name_suffix += f"_{suffix_part}" # Grab field maps diff = False blip = False @@ -476,42 +533,58 @@ def ingress_func_metadata(wf, cfg, rpool, sub_dict, subject_id, second = False for key in sub_dict["fmap"]: gather_fmap = create_fmap_datasource( - sub_dict["fmap"], f"fmap_gather_{key}_{subject_id}") + sub_dict["fmap"], f"fmap_gather_{key}_{subject_id}" + ) gather_fmap.inputs.inputnode.set( - subject=subject_id, creds_path=input_creds_path, - dl_dir=cfg.pipeline_setup['working_directory']['path']) + subject=subject_id, + creds_path=input_creds_path, + dl_dir=cfg.pipeline_setup["working_directory"]["path"], + ) gather_fmap.inputs.inputnode.scan = key orig_key = key - if 'epi' in key and not second: - key = 'epi-1' + if "epi" in key and not second: + key = "epi-1" second = True - elif 'epi' in key and second: - key = 'epi-2' - - rpool.set_data(key, 
gather_fmap, 'outputspec.rest', {}, "", - "fmap_ingress") - rpool.set_data(f'{key}-scan-params', gather_fmap, - 'outputspec.scan_params', {}, "", - "fmap_params_ingress") + elif "epi" in key and second: + key = "epi-2" + + rpool.set_data(key, gather_fmap, "outputspec.rest", {}, "", "fmap_ingress") + rpool.set_data( + f"{key}-scan-params", + gather_fmap, + "outputspec.scan_params", + {}, + "", + "fmap_params_ingress", + ) fmap_rp_list.append(key) - get_fmap_metadata_imports = ['import json'] - get_fmap_metadata = pe.Node(Function( - input_names=['data_config_scan_params'], - output_names=['dwell_time', - 'pe_direction', - 'total_readout', - 'echo_time', - 'echo_time_one', - 'echo_time_two'], - function=get_fmap_phasediff_metadata, - imports=get_fmap_metadata_imports), - name=f'{key}_get_metadata{name_suffix}') - - wf.connect(gather_fmap, 'outputspec.scan_params', - get_fmap_metadata, 'data_config_scan_params') + get_fmap_metadata_imports = ["import json"] + get_fmap_metadata = pe.Node( + Function( + input_names=["data_config_scan_params"], + output_names=[ + "dwell_time", + "pe_direction", + "total_readout", + "echo_time", + "echo_time_one", + "echo_time_two", + ], + function=get_fmap_phasediff_metadata, + imports=get_fmap_metadata_imports, + ), + name=f"{key}_get_metadata{name_suffix}", + ) + + wf.connect( + gather_fmap, + "outputspec.scan_params", + get_fmap_metadata, + "data_config_scan_params", + ) if "phase" in key: # leave it open to all three options, in case there is a @@ -521,233 +594,302 @@ def ingress_func_metadata(wf, cfg, rpool, sub_dict, subject_id, # at least one of these rpool keys will have a None value, # which will be sorted out in gather_echo_times below - rpool.set_data(f'{key}-TE', get_fmap_metadata, 'echo_time', - {}, "", "fmap_TE_ingress") + rpool.set_data( + f"{key}-TE", + get_fmap_metadata, + "echo_time", + {}, + "", + "fmap_TE_ingress", + ) fmap_TE_list.append(f"{key}-TE") - rpool.set_data(f'{key}-TE1', - get_fmap_metadata, 'echo_time_one', - {}, "", "fmap_TE1_ingress") + rpool.set_data( + f"{key}-TE1", + get_fmap_metadata, + "echo_time_one", + {}, + "", + "fmap_TE1_ingress", + ) fmap_TE_list.append(f"{key}-TE1") - rpool.set_data(f'{key}-TE2', - get_fmap_metadata, 'echo_time_two', - {}, "", "fmap_TE2_ingress") + rpool.set_data( + f"{key}-TE2", + get_fmap_metadata, + "echo_time_two", + {}, + "", + "fmap_TE2_ingress", + ) fmap_TE_list.append(f"{key}-TE2") elif "magnitude" in key: - rpool.set_data(f'{key}-TE', get_fmap_metadata, 'echo_time', - {}, "", "fmap_TE_ingress") + rpool.set_data( + f"{key}-TE", + get_fmap_metadata, + "echo_time", + {}, + "", + "fmap_TE_ingress", + ) fmap_TE_list.append(f"{key}-TE") - rpool.set_data(f'{key}-dwell', get_fmap_metadata, - 'dwell_time', {}, "", "fmap_dwell_ingress") - rpool.set_data(f'{key}-pedir', get_fmap_metadata, - 'pe_direction', {}, "", "fmap_pedir_ingress") - rpool.set_data(f'{key}-total-readout', get_fmap_metadata, - 'total_readout', {}, "", "fmap_readout_ingress") + rpool.set_data( + f"{key}-dwell", + get_fmap_metadata, + "dwell_time", + {}, + "", + "fmap_dwell_ingress", + ) + rpool.set_data( + f"{key}-pedir", + get_fmap_metadata, + "pe_direction", + {}, + "", + "fmap_pedir_ingress", + ) + rpool.set_data( + f"{key}-total-readout", + get_fmap_metadata, + "total_readout", + {}, + "", + "fmap_readout_ingress", + ) - if 'phase' in key or 'mag' in key: + if "phase" in key or "mag" in key: diff = True - if re.match('epi_[AP]{2}', orig_key): + if re.match("epi_[AP]{2}", orig_key): blip = True if diff: - calc_delta_ratio = 
pe.Node(Function( - input_names=['effective_echo_spacing', - 'echo_times'], - output_names=['deltaTE', - 'ees_asym_ratio'], - function=calc_delta_te_and_asym_ratio, - imports=['from typing import Optional, Tuple']), - name=f'diff_distcor_calc_delta{name_suffix}') - - gather_echoes = pe.Node(Function( - input_names=['echotime_1', - 'echotime_2', - 'echotime_3', - 'echotime_4'], - output_names=['echotime_list'], - function=gather_echo_times), - name='fugue_gather_echo_times') + calc_delta_ratio = pe.Node( + Function( + input_names=["effective_echo_spacing", "echo_times"], + output_names=["deltaTE", "ees_asym_ratio"], + function=calc_delta_te_and_asym_ratio, + imports=["from typing import Optional, Tuple"], + ), + name=f"diff_distcor_calc_delta{name_suffix}", + ) + + gather_echoes = pe.Node( + Function( + input_names=[ + "echotime_1", + "echotime_2", + "echotime_3", + "echotime_4", + ], + output_names=["echotime_list"], + function=gather_echo_times, + ), + name="fugue_gather_echo_times", + ) for idx, fmap_file in enumerate(fmap_TE_list, start=1): try: node, out_file = rpool.get(fmap_file)[ - f"['{fmap_file}:fmap_TE_ingress']"]['data'] - wf.connect(node, out_file, gather_echoes, - f'echotime_{idx}') + f"['{fmap_file}:fmap_TE_ingress']" + ]["data"] + wf.connect(node, out_file, gather_echoes, f"echotime_{idx}") except KeyError: pass - wf.connect(gather_echoes, 'echotime_list', - calc_delta_ratio, 'echo_times') + wf.connect(gather_echoes, "echotime_list", calc_delta_ratio, "echo_times") # Add in nodes to get parameters from configuration file # a node which checks if scan_parameters are present for each scan - scan_params = pe.Node(Function( - input_names=['data_config_scan_params', - 'subject_id', - 'scan', - 'pipeconfig_tr', - 'pipeconfig_tpattern', - 'pipeconfig_start_indx', - 'pipeconfig_stop_indx'], - output_names=['tr', - 'tpattern', - 'template', - 'ref_slice', - 'start_indx', - 'stop_indx', - 'pe_direction', - 'effective_echo_spacing'], - function=get_scan_params, - imports=['from CPAC.utils.utils import check, try_fetch_parameter'] - ), name=f"bold_scan_params_{subject_id}{name_suffix}") + scan_params = pe.Node( + Function( + input_names=[ + "data_config_scan_params", + "subject_id", + "scan", + "pipeconfig_tr", + "pipeconfig_tpattern", + "pipeconfig_start_indx", + "pipeconfig_stop_indx", + ], + output_names=[ + "tr", + "tpattern", + "template", + "ref_slice", + "start_indx", + "stop_indx", + "pe_direction", + "effective_echo_spacing", + ], + function=get_scan_params, + imports=["from CPAC.utils.utils import check, try_fetch_parameter"], + ), + name=f"bold_scan_params_{subject_id}{name_suffix}", + ) scan_params.inputs.subject_id = subject_id scan_params.inputs.set( - pipeconfig_start_indx=cfg.functional_preproc['truncation'][ - 'start_tr'], - pipeconfig_stop_indx=cfg.functional_preproc['truncation']['stop_tr']) + pipeconfig_start_indx=cfg.functional_preproc["truncation"]["start_tr"], + pipeconfig_stop_indx=cfg.functional_preproc["truncation"]["stop_tr"], + ) - node, out = rpool.get('scan')["['scan:func_ingress']"]['data'] - wf.connect(node, out, scan_params, 'scan') + node, out = rpool.get("scan")["['scan:func_ingress']"]["data"] + wf.connect(node, out, scan_params, "scan") # Workaround for extracting metadata with ingress - if rpool.check_rpool('derivatives-dir'): - selectrest_json = pe.Node(function.Function(input_names=['scan', - 'rest_dict', - 'resource'], - output_names=['file_path'], - function=get_rest, - as_module=True), - name='selectrest_json') + if 
rpool.check_rpool("derivatives-dir"): + selectrest_json = pe.Node( + function.Function( + input_names=["scan", "rest_dict", "resource"], + output_names=["file_path"], + function=get_rest, + as_module=True, + ), + name="selectrest_json", + ) selectrest_json.inputs.rest_dict = sub_dict selectrest_json.inputs.resource = "scan_parameters" - wf.connect(node, out, selectrest_json, 'scan') - wf.connect(selectrest_json, 'file_path', scan_params, 'data_config_scan_params') - + wf.connect(node, out, selectrest_json, "scan") + wf.connect(selectrest_json, "file_path", scan_params, "data_config_scan_params") + else: # wire in the scan parameter workflow - node, out = rpool.get('scan-params')[ - "['scan-params:scan_params_ingress']"]['data'] - wf.connect(node, out, scan_params, 'data_config_scan_params') - - rpool.set_data('TR', scan_params, 'tr', {}, "", "func_metadata_ingress") - rpool.set_data('tpattern', scan_params, 'tpattern', {}, "", - "func_metadata_ingress") - rpool.set_data('template', scan_params, 'template', {}, "", - "func_metadata_ingress") - rpool.set_data('start-tr', scan_params, 'start_indx', {}, "", - "func_metadata_ingress") - rpool.set_data('stop-tr', scan_params, 'stop_indx', {}, "", - "func_metadata_ingress") - rpool.set_data('pe-direction', scan_params, 'pe_direction', {}, "", - "func_metadata_ingress") + node, out = rpool.get("scan-params")["['scan-params:scan_params_ingress']"][ + "data" + ] + wf.connect(node, out, scan_params, "data_config_scan_params") + + rpool.set_data("TR", scan_params, "tr", {}, "", "func_metadata_ingress") + rpool.set_data("tpattern", scan_params, "tpattern", {}, "", "func_metadata_ingress") + rpool.set_data("template", scan_params, "template", {}, "", "func_metadata_ingress") + rpool.set_data( + "start-tr", scan_params, "start_indx", {}, "", "func_metadata_ingress" + ) + rpool.set_data("stop-tr", scan_params, "stop_indx", {}, "", "func_metadata_ingress") + rpool.set_data( + "pe-direction", scan_params, "pe_direction", {}, "", "func_metadata_ingress" + ) if diff: # Connect EffectiveEchoSpacing from functional metadata - rpool.set_data('effectiveEchoSpacing', scan_params, - 'effective_echo_spacing', {}, '', - 'func_metadata_ingress') - node, out_file = rpool.get('effectiveEchoSpacing')[ - "['effectiveEchoSpacing:func_metadata_ingress']"]['data'] - wf.connect(node, out_file, calc_delta_ratio, 'effective_echo_spacing') - rpool.set_data('deltaTE', calc_delta_ratio, 'deltaTE', {}, '', - 'deltaTE_ingress') - rpool.set_data('ees-asym-ratio', calc_delta_ratio, - 'ees_asym_ratio', {}, '', - 'ees_asym_ratio_ingress') + rpool.set_data( + "effectiveEchoSpacing", + scan_params, + "effective_echo_spacing", + {}, + "", + "func_metadata_ingress", + ) + node, out_file = rpool.get("effectiveEchoSpacing")[ + "['effectiveEchoSpacing:func_metadata_ingress']" + ]["data"] + wf.connect(node, out_file, calc_delta_ratio, "effective_echo_spacing") + rpool.set_data( + "deltaTE", calc_delta_ratio, "deltaTE", {}, "", "deltaTE_ingress" + ) + rpool.set_data( + "ees-asym-ratio", + calc_delta_ratio, + "ees_asym_ratio", + {}, + "", + "ees_asym_ratio_ingress", + ) return wf, rpool, diff, blip, fmap_rp_list def create_general_datasource(wf_name): - from CPAC.pipeline import nipype_pipeline_engine as pe import nipype.interfaces.utility as util + from CPAC.pipeline import nipype_pipeline_engine as pe + wf = pe.Workflow(name=wf_name) - inputnode = pe.Node(util.IdentityInterface( - fields=['unique_id', 'data', 'scan', 'creds_path', - 'dl_dir'], - mandatory_inputs=True), - name='inputnode') - - 
check_s3_node = pe.Node(function.Function(input_names=['file_path', - 'creds_path', - 'dl_dir', - 'img_type'], - output_names=['local_path'], - function=check_for_s3, - as_module=True), - name='check_for_s3') + inputnode = pe.Node( + util.IdentityInterface( + fields=["unique_id", "data", "scan", "creds_path", "dl_dir"], + mandatory_inputs=True, + ), + name="inputnode", + ) + + check_s3_node = pe.Node( + function.Function( + input_names=["file_path", "creds_path", "dl_dir", "img_type"], + output_names=["local_path"], + function=check_for_s3, + as_module=True, + ), + name="check_for_s3", + ) check_s3_node.inputs.img_type = "other" - wf.connect(inputnode, 'data', check_s3_node, 'file_path') - wf.connect(inputnode, 'creds_path', check_s3_node, 'creds_path') - wf.connect(inputnode, 'dl_dir', check_s3_node, 'dl_dir') + wf.connect(inputnode, "data", check_s3_node, "file_path") + wf.connect(inputnode, "creds_path", check_s3_node, "creds_path") + wf.connect(inputnode, "dl_dir", check_s3_node, "dl_dir") - outputnode = pe.Node(util.IdentityInterface(fields=['unique_id', - 'data', - 'scan']), - name='outputspec') + outputnode = pe.Node( + util.IdentityInterface(fields=["unique_id", "data", "scan"]), name="outputspec" + ) - wf.connect(inputnode, 'unique_id', outputnode, 'unique_id') - wf.connect(inputnode, 'scan', outputnode, 'scan') - wf.connect(check_s3_node, 'local_path', outputnode, 'data') + wf.connect(inputnode, "unique_id", outputnode, "unique_id") + wf.connect(inputnode, "scan", outputnode, "scan") + wf.connect(check_s3_node, "local_path", outputnode, "data") return wf -def create_check_for_s3_node(name, file_path, img_type='other', - creds_path=None, dl_dir=None, map_node=False): +def create_check_for_s3_node( + name, file_path, img_type="other", creds_path=None, dl_dir=None, map_node=False +): if map_node: - check_s3_node = pe.MapNode(function.Function(input_names=['file_path', - 'creds_path', - 'dl_dir', - 'img_type'], - output_names=[ - 'local_path'], - function=check_for_s3, - as_module=True), - iterfield=['file_path'], - name='check_for_s3_%s' % name) + check_s3_node = pe.MapNode( + function.Function( + input_names=["file_path", "creds_path", "dl_dir", "img_type"], + output_names=["local_path"], + function=check_for_s3, + as_module=True, + ), + iterfield=["file_path"], + name="check_for_s3_%s" % name, + ) else: - check_s3_node = pe.Node(function.Function(input_names=['file_path', - 'creds_path', - 'dl_dir', - 'img_type'], - output_names=['local_path'], - function=check_for_s3, - as_module=True), - name='check_for_s3_%s' % name) + check_s3_node = pe.Node( + function.Function( + input_names=["file_path", "creds_path", "dl_dir", "img_type"], + output_names=["local_path"], + function=check_for_s3, + as_module=True, + ), + name="check_for_s3_%s" % name, + ) check_s3_node.inputs.set( - file_path=file_path, - creds_path=creds_path, - dl_dir=dl_dir, - img_type=img_type + file_path=file_path, creds_path=creds_path, dl_dir=dl_dir, img_type=img_type ) return check_s3_node # Check if passed-in file is on S3 -def check_for_s3(file_path, creds_path=None, dl_dir=None, img_type='other', - verbose=False): +def check_for_s3( + file_path, creds_path=None, dl_dir=None, img_type="other", verbose=False +): # Import packages import os - import nibabel as nib + import botocore.exceptions + import nibabel as nib from indi_aws import fetch_creds # Init variables - s3_str = 's3://' + s3_str = "s3://" if creds_path: - if "None" in creds_path or "none" in creds_path or \ - "null" in creds_path: + if "None" in 
creds_path or "none" in creds_path or "null" in creds_path: creds_path = None if dl_dir is None: @@ -765,14 +907,13 @@ def check_for_s3(file_path, creds_path=None, dl_dir=None, img_type='other', return local_path if file_path.lower().startswith(s3_str): - - file_path = s3_str + file_path[len(s3_str):] + file_path = s3_str + file_path[len(s3_str) :] # Get bucket name and bucket object - bucket_name = file_path[len(s3_str):].split('/')[0] + bucket_name = file_path[len(s3_str) :].split("/")[0] # Extract relative key path from bucket and local path s3_prefix = s3_str + bucket_name - s3_key = file_path[len(s3_prefix) + 1:] + s3_key = file_path[len(s3_prefix) + 1 :] local_path = os.path.join(dl_dir, bucket_name, s3_key) # Get local directory and create folders if they don't exist local_dir = os.path.dirname(local_path) os.makedirs(local_dir, exist_ok=True) if os.path.exists(local_path): - print("{0} already exists- skipping download.".format(local_path)) + print(f"{local_path} already exists - skipping download.") else: # Download file try: bucket = fetch_creds.return_bucket(creds_path, bucket_name) - print("Attempting to download from AWS S3: {0}".format( file_path)) + print(f"Attempting to download from AWS S3: {file_path}") bucket.download_file(Key=s3_key, Filename=local_path) except botocore.exceptions.ClientError as exc: - error_code = int(exc.response['Error']['Code']) + error_code = int(exc.response["Error"]["Code"]) err_msg = str(exc) if error_code == 403: - err_msg = 'Access to bucket: "%s" is denied; using credentials ' \ 'in subject list: "%s"; cannot access the file "%s"' \ % (bucket_name, creds_path, file_path) + err_msg = ( + 'Access to bucket: "%s" is denied; using credentials ' + 'in subject list: "%s"; cannot access the file "%s"' + % (bucket_name, creds_path, file_path) + ) elif error_code == 404: - err_msg = 'File: {0} does not exist; check spelling and try ' \ 'again'.format( os.path.join(bucket_name, s3_key)) + err_msg = ( + f"File: {os.path.join(bucket_name, s3_key)} does not exist; check spelling and try " + "again" + ) else: - err_msg = 'Unable to connect to bucket: "%s". Error message:\n%s' \ % (bucket_name, exc) + err_msg = ( + 'Unable to connect to bucket: "%s". Error message:\n%s' + % (bucket_name, exc) + ) raise Exception(err_msg) except Exception as exc: - err_msg = 'Unable to connect to bucket: "%s". 
Error message:\n%s' % ( + bucket_name, + exc, + ) raise Exception(err_msg) # Otherwise just return what was passed in, resolving if a link @@ -821,52 +968,61 @@ def check_for_s3(file_path, creds_path=None, dl_dir=None, img_type='other', # alert users to 2020-07-20 Neuroparc atlas update (v0 to v1) ndmg_atlases = {} with open( - os.path.join( - os.path.dirname(os.path.dirname(__file__)), - 'resources/templates/ndmg_atlases.csv' - ) + os.path.join( + os.path.dirname(os.path.dirname(__file__)), + "resources/templates/ndmg_atlases.csv", + ) ) as ndmg_atlases_file: - ndmg_atlases['v0'], ndmg_atlases['v1'] = zip(*[( - f'/ndmg_atlases/label/Human/{atlas[0]}', - f'/ndmg_atlases/label/Human/{atlas[1]}' - ) for atlas in - csv.reader( - ndmg_atlases_file)]) - if local_path in ndmg_atlases['v0']: + ndmg_atlases["v0"], ndmg_atlases["v1"] = zip( + *[ + ( + f"/ndmg_atlases/label/Human/{atlas[0]}", + f"/ndmg_atlases/label/Human/{atlas[1]}", + ) + for atlas in csv.reader(ndmg_atlases_file) + ] + ) + if local_path in ndmg_atlases["v0"]: raise FileNotFoundError( - ''.join([ - 'Neuroparc atlas paths were updated on July 20, 2020. ' - 'C-PAC configuration files using Neuroparc v0 atlas paths ' - '(including C-PAC default and preconfigured pipeline ' - 'configurations from v1.6.2a and earlier) need to be ' - 'updated to use Neuroparc atlases. Your current ' - 'configuration includes the Neuroparc v0 path ' - f'{local_path} which needs to be updated to ', - ndmg_atlases['v1'][ndmg_atlases['v0'].index(local_path)], - '. For a full list such paths, see https://fcp-indi.' - 'github.io/docs/nightly/user/ndmg_atlases' - ]) + "".join( + [ + "Neuroparc atlas paths were updated on July 20, 2020. " + "C-PAC configuration files using Neuroparc v0 atlas paths " + "(including C-PAC default and preconfigured pipeline " + "configurations from v1.6.2a and earlier) need to be " + "updated to use Neuroparc atlases. Your current " + "configuration includes the Neuroparc v0 path " + f"{local_path} which needs to be updated to ", + ndmg_atlases["v1"][ndmg_atlases["v0"].index(local_path)], + ". For a full list of such paths, see https://fcp-indi." + "github.io/docs/nightly/user/ndmg_atlases", + ] + ) ) else: - raise FileNotFoundError(f'File {local_path} does not exist!') + raise FileNotFoundError(f"File {local_path} does not exist!") if verbose: - print("Downloaded file:\n{0}\n".format(local_path)) + print(f"Downloaded file:\n{local_path}\n") # Check image dimensionality - if local_path.endswith('.nii') or local_path.endswith('.nii.gz'): + if local_path.endswith(".nii") or local_path.endswith(".nii.gz"): img_nii = nib.load(local_path) - if img_type == 'anat': + if img_type == "anat": if len(img_nii.shape) != 3: - raise IOError('File: %s must be an anatomical image with 3 ' 'dimensions but %d dimensions found!' % (local_path, len(img_nii.shape))) + raise IOError( + "File: %s must be an anatomical image with 3 " + "dimensions but %d dimensions found!" + % (local_path, len(img_nii.shape)) + ) - elif img_type == 'func': + elif img_type == "func": if len(img_nii.shape) not in [3, 4]: - raise IOError('File: %s must be a functional image with 3 or ' '4 dimensions but %d dimensions found!' % (local_path, len(img_nii.shape))) + raise IOError( + "File: %s must be a functional image with 3 or " + "4 dimensions but %d dimensions found!" 
+ % (local_path, len(img_nii.shape)) + ) return local_path @@ -875,41 +1031,38 @@ def gather_extraction_maps(c): ts_analysis_dict = {} sca_analysis_dict = {} - if hasattr(c, 'timeseries_extraction'): - - tsa_roi_dict = c.timeseries_extraction['tse_roi_paths'] + if hasattr(c, "timeseries_extraction"): + tsa_roi_dict = c.timeseries_extraction["tse_roi_paths"] # Timeseries and SCA config selections processing # flip the dictionary for roi_path in tsa_roi_dict.keys(): - ts_analysis_to_run = [ - x.strip() for x in tsa_roi_dict[roi_path].split(",") - ] + ts_analysis_to_run = [x.strip() for x in tsa_roi_dict[roi_path].split(",")] for analysis_type in ts_analysis_to_run: if analysis_type not in ts_analysis_dict.keys(): ts_analysis_dict[analysis_type] = [] ts_analysis_dict[analysis_type].append(roi_path) - if c.timeseries_extraction['run']: - + if c.timeseries_extraction["run"]: if not tsa_roi_dict: - err = "\n\n[!] CPAC says: Time Series Extraction is " \ - "set to run, but no ROI NIFTI file paths were " \ - "provided!\n\n" + err = ( + "\n\n[!] CPAC says: Time Series Extraction is " + "set to run, but no ROI NIFTI file paths were " + "provided!\n\n" + ) raise Exception(err) - if c.seed_based_correlation_analysis['run']: - + if c.seed_based_correlation_analysis["run"]: try: - sca_roi_dict = c.seed_based_correlation_analysis[ - 'sca_roi_paths' - ] + sca_roi_dict = c.seed_based_correlation_analysis["sca_roi_paths"] except KeyError: - err = "\n\n[!] CPAC says: Seed-based Correlation Analysis " \ - "is set to run, but no ROI NIFTI file paths were " \ - "provided!\n\n" + err = ( + "\n\n[!] CPAC says: Seed-based Correlation Analysis " + "is set to run, but no ROI NIFTI file paths were " + "provided!\n\n" + ) raise Exception(err) # flip the dictionary @@ -958,13 +1111,19 @@ def get_highest_local_res(template: Union[Path, str], tagname: str) -> Path: LookupError: Could not find template /cpac_templates/dne_T1w_2mm.nii.gz """ from CPAC.pipeline.schema import RESOLUTION_REGEX + if isinstance(template, str): template = Path(template) template_pattern = ( - RESOLUTION_REGEX.replace('^', '').replace('$', '').join([ - re.escape(_part) for _part in template.name.split(tagname, 1)])) - matching_templates = [file for file in template.parent.iterdir() if - re.match(template_pattern, file.name)] + RESOLUTION_REGEX.replace("^", "") + .replace("$", "") + .join([re.escape(_part) for _part in template.name.split(tagname, 1)]) + ) + matching_templates = [ + file + for file in template.parent.iterdir() + if re.match(template_pattern, file.name) + ] matching_templates.sort() try: return matching_templates[0] @@ -987,13 +1146,13 @@ def res_string_to_tuple(resolution): Tuple of floats, e.g. 
(3.438, 3.438, 3.4) """ if "x" in str(resolution): - return tuple( - float(i.replace('mm', '')) for i in resolution.split("x")) - return (float(resolution.replace('mm', '')),) * 3 + return tuple(float(i.replace("mm", "")) for i in resolution.split("x")) + return (float(resolution.replace("mm", "")),) * 3 def resolve_resolution(resolution, template, template_name, tag=None): from nipype.interfaces import afni + from CPAC.pipeline import nipype_pipeline_engine as pe from CPAC.utils.datasource import check_for_s3 @@ -1004,8 +1163,7 @@ def resolve_resolution(resolution, template, template_name, tag=None): tagname = "${" + tag + "}" try: if tagname is not None: - local_path = check_for_s3( - template.replace(tagname, str(resolution))) + local_path = check_for_s3(template.replace(tagname, str(resolution))) except (IOError, OSError): local_path = None @@ -1017,25 +1175,27 @@ def resolve_resolution(resolution, template, template_name, tag=None): if local_path is None: if tagname is not None: - if template.startswith('s3:'): - ref_template = template.replace(tagname, '1mm') + if template.startswith("s3:"): + ref_template = template.replace(tagname, "1mm") local_path = check_for_s3(ref_template) else: local_path = get_highest_local_res(template, tagname) - elif tagname is None and template.startswith('s3:'): + elif tagname is None and template.startswith("s3:"): local_path = check_for_s3(template) else: local_path = template - resample = pe.Node(interface=afni.Resample(), - name=template_name, - mem_gb=0, - mem_x=(0.0115, 'in_file', 't')) + resample = pe.Node( + interface=afni.Resample(), + name=template_name, + mem_gb=0, + mem_x=(0.0115, "in_file", "t"), + ) resample.inputs.voxel_size = res_string_to_tuple(resolution) - resample.inputs.outputtype = 'NIFTI_GZ' - resample.inputs.resample_mode = 'Cu' + resample.inputs.outputtype = "NIFTI_GZ" + resample.inputs.resample_mode = "Cu" resample.inputs.in_file = local_path - resample.base_dir = '.' + resample.base_dir = "." 
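+ # resample.run() below executes AFNI 3dresample immediately, outside any workflow graph: + # with base_dir=".", the template is written resampled to the requested voxel size + # (cubic "Cu" resample mode, NIFTI_GZ output) in the current working directory, and + # the resulting file path is returned.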
resampled_template = resample.run() local_path = resampled_template.outputs.out_file @@ -1043,77 +1203,82 @@ def resolve_resolution(resolution, template, template_name, tag=None): return local_path -def create_anat_datasource(wf_name='anat_datasource'): - from CPAC.pipeline import nipype_pipeline_engine as pe +def create_anat_datasource(wf_name="anat_datasource"): import nipype.interfaces.utility as util + from CPAC.pipeline import nipype_pipeline_engine as pe + wf = pe.Workflow(name=wf_name) - inputnode = pe.Node(util.IdentityInterface( - fields=['subject', 'anat', 'creds_path', - 'dl_dir', 'img_type'], - mandatory_inputs=True), - name='inputnode') - - check_s3_node = pe.Node(function.Function(input_names=['file_path', - 'creds_path', - 'dl_dir', - 'img_type'], - output_names=['local_path'], - function=check_for_s3, - as_module=True), - name='check_for_s3') - - wf.connect(inputnode, 'anat', check_s3_node, 'file_path') - wf.connect(inputnode, 'creds_path', check_s3_node, 'creds_path') - wf.connect(inputnode, 'dl_dir', check_s3_node, 'dl_dir') - wf.connect(inputnode, 'img_type', check_s3_node, 'img_type') - - outputnode = pe.Node(util.IdentityInterface(fields=['subject', - 'anat']), - name='outputspec') - - wf.connect(inputnode, 'subject', outputnode, 'subject') - wf.connect(check_s3_node, 'local_path', outputnode, 'anat') + inputnode = pe.Node( + util.IdentityInterface( + fields=["subject", "anat", "creds_path", "dl_dir", "img_type"], + mandatory_inputs=True, + ), + name="inputnode", + ) + + check_s3_node = pe.Node( + function.Function( + input_names=["file_path", "creds_path", "dl_dir", "img_type"], + output_names=["local_path"], + function=check_for_s3, + as_module=True, + ), + name="check_for_s3", + ) + + wf.connect(inputnode, "anat", check_s3_node, "file_path") + wf.connect(inputnode, "creds_path", check_s3_node, "creds_path") + wf.connect(inputnode, "dl_dir", check_s3_node, "dl_dir") + wf.connect(inputnode, "img_type", check_s3_node, "img_type") + + outputnode = pe.Node( + util.IdentityInterface(fields=["subject", "anat"]), name="outputspec" + ) + + wf.connect(inputnode, "subject", outputnode, "subject") + wf.connect(check_s3_node, "local_path", outputnode, "anat") # Return the workflow return wf -def create_roi_mask_dataflow(masks, wf_name='datasource_roi_mask'): +def create_roi_mask_dataflow(masks, wf_name="datasource_roi_mask"): import os mask_dict = {} for mask_file in masks: + mask_file = mask_file.rstrip("\r\n") - mask_file = mask_file.rstrip('\r\n') - - if mask_file.strip() == '' or mask_file.startswith('#'): + if mask_file.strip() == "" or mask_file.startswith("#"): continue name, desc = lookup_identifier(mask_file) - if name == 'template': + if name == "template": base_file = os.path.basename(mask_file) try: - valid_extensions = ['.nii', '.nii.gz'] + valid_extensions = [".nii", ".nii.gz"] base_name = [ - base_file[:-len(ext)] + base_file[: -len(ext)] for ext in valid_extensions if base_file.endswith(ext) - ][0] + ][0] - for key in ['res', 'space']: + for key in ["res", "space"]: base_name = bids_remove_entity(base_name, key) except IndexError: # pylint: disable=raise-missing-from - raise ValueError('Error in spatial_map_dataflow: File ' - f'extension of {base_file} not ".nii" or ' - '.nii.gz') + raise ValueError( + "Error in spatial_map_dataflow: File " + f'extension of {base_file} not ".nii" or ' + ".nii.gz" + ) except Exception as e: raise e @@ -1121,54 +1286,56 @@ def create_roi_mask_dataflow(masks, wf_name='datasource_roi_mask'): base_name = format_identifier(name, desc) 
if base_name in mask_dict: - raise ValueError('Duplicate templates/atlases not allowed: ' - f'{mask_file} {mask_dict[base_name]}') + raise ValueError( + "Duplicate templates/atlases not allowed: " + f"{mask_file} {mask_dict[base_name]}" + ) mask_dict[base_name] = mask_file wf = pe.Workflow(name=wf_name) - inputnode = pe.Node(util.IdentityInterface(fields=['mask', - 'mask_file', - 'creds_path', - 'dl_dir'], - mandatory_inputs=True), - name='inputspec') + inputnode = pe.Node( + util.IdentityInterface( + fields=["mask", "mask_file", "creds_path", "dl_dir"], mandatory_inputs=True + ), + name="inputspec", + ) - mask_keys, mask_values = \ - zip(*mask_dict.items()) + mask_keys, mask_values = zip(*mask_dict.items()) inputnode.synchronize = True inputnode.iterables = [ - ('mask', mask_keys), - ('mask_file', mask_values), + ("mask", mask_keys), + ("mask_file", mask_values), ] - check_s3_node = pe.Node(function.Function(input_names=['file_path', - 'creds_path', - 'dl_dir', - 'img_type'], - output_names=['local_path'], - function=check_for_s3, - as_module=True), - name='check_for_s3') + check_s3_node = pe.Node( + function.Function( + input_names=["file_path", "creds_path", "dl_dir", "img_type"], + output_names=["local_path"], + function=check_for_s3, + as_module=True, + ), + name="check_for_s3", + ) - wf.connect(inputnode, 'mask_file', check_s3_node, 'file_path') - wf.connect(inputnode, 'creds_path', check_s3_node, 'creds_path') - wf.connect(inputnode, 'dl_dir', check_s3_node, 'dl_dir') - check_s3_node.inputs.img_type = 'mask' + wf.connect(inputnode, "mask_file", check_s3_node, "file_path") + wf.connect(inputnode, "creds_path", check_s3_node, "creds_path") + wf.connect(inputnode, "dl_dir", check_s3_node, "dl_dir") + check_s3_node.inputs.img_type = "mask" - outputnode = pe.Node(util.IdentityInterface(fields=['out_file', - 'out_name']), - name='outputspec') + outputnode = pe.Node( + util.IdentityInterface(fields=["out_file", "out_name"]), name="outputspec" + ) - wf.connect(check_s3_node, 'local_path', outputnode, 'out_file') - wf.connect(inputnode, 'mask', outputnode, 'out_name') + wf.connect(check_s3_node, "local_path", outputnode, "out_file") + wf.connect(inputnode, "mask", outputnode, "out_name") return wf -def create_spatial_map_dataflow(spatial_maps, wf_name='datasource_maps'): +def create_spatial_map_dataflow(spatial_maps, wf_name="datasource_maps"): import os wf = pe.Workflow(name=wf_name) @@ -1176,126 +1343,123 @@ def create_spatial_map_dataflow(spatial_maps, wf_name='datasource_maps'): spatial_map_dict = {} for spatial_map_file in spatial_maps: - - spatial_map_file = spatial_map_file.rstrip('\r\n') + spatial_map_file = spatial_map_file.rstrip("\r\n") base_file = os.path.basename(spatial_map_file) try: - valid_extensions = ['.nii', '.nii.gz'] + valid_extensions = [".nii", ".nii.gz"] base_name = [ - base_file[:-len(ext)] + base_file[: -len(ext)] for ext in valid_extensions if base_file.endswith(ext) - ][0] + ][0] if base_name in spatial_map_dict: raise ValueError( - 'Files with same name not allowed: %s %s' % ( - spatial_map_file, - spatial_map_dict[base_name] - ) + "Files with same name not allowed: %s %s" + % (spatial_map_file, spatial_map_dict[base_name]) ) spatial_map_dict[base_name] = spatial_map_file - except IndexError as e: - raise Exception('Error in spatial_map_dataflow: ' - 'File extension not in .nii and .nii.gz') + except IndexError: + raise Exception( + "Error in spatial_map_dataflow: " + "File extension not in .nii and .nii.gz" + ) - inputnode = 
pe.Node(util.IdentityInterface(fields=['spatial_map', - 'spatial_map_file', - 'creds_path', - 'dl_dir'], - mandatory_inputs=True), - name='inputspec') + inputnode = pe.Node( + util.IdentityInterface( + fields=["spatial_map", "spatial_map_file", "creds_path", "dl_dir"], + mandatory_inputs=True, + ), + name="inputspec", + ) - spatial_map_keys, spatial_map_values = \ - zip(*spatial_map_dict.items()) + spatial_map_keys, spatial_map_values = zip(*spatial_map_dict.items()) inputnode.synchronize = True inputnode.iterables = [ - ('spatial_map', spatial_map_keys), - ('spatial_map_file', spatial_map_values), + ("spatial_map", spatial_map_keys), + ("spatial_map_file", spatial_map_values), ] - check_s3_node = pe.Node(function.Function(input_names=['file_path', - 'creds_path', - 'dl_dir', - 'img_type'], - output_names=['local_path'], - function=check_for_s3, - as_module=True), - name='check_for_s3') + check_s3_node = pe.Node( + function.Function( + input_names=["file_path", "creds_path", "dl_dir", "img_type"], + output_names=["local_path"], + function=check_for_s3, + as_module=True, + ), + name="check_for_s3", + ) - wf.connect(inputnode, 'spatial_map_file', check_s3_node, 'file_path') - wf.connect(inputnode, 'creds_path', check_s3_node, 'creds_path') - wf.connect(inputnode, 'dl_dir', check_s3_node, 'dl_dir') - check_s3_node.inputs.img_type = 'mask' + wf.connect(inputnode, "spatial_map_file", check_s3_node, "file_path") + wf.connect(inputnode, "creds_path", check_s3_node, "creds_path") + wf.connect(inputnode, "dl_dir", check_s3_node, "dl_dir") + check_s3_node.inputs.img_type = "mask" - select_spatial_map = pe.Node(util.IdentityInterface(fields=['out_file', - 'out_name'], - mandatory_inputs=True), - name='select_spatial_map') + select_spatial_map = pe.Node( + util.IdentityInterface(fields=["out_file", "out_name"], mandatory_inputs=True), + name="select_spatial_map", + ) - wf.connect(check_s3_node, 'local_path', select_spatial_map, 'out_file') - wf.connect(inputnode, 'spatial_map', select_spatial_map, 'out_name') + wf.connect(check_s3_node, "local_path", select_spatial_map, "out_file") + wf.connect(inputnode, "spatial_map", select_spatial_map, "out_name") return wf -def create_grp_analysis_dataflow(wf_name='gp_dataflow'): - from CPAC.pipeline import nipype_pipeline_engine as pe +def create_grp_analysis_dataflow(wf_name="gp_dataflow"): import nipype.interfaces.utility as util + + from CPAC.pipeline import nipype_pipeline_engine as pe from CPAC.utils.datasource import select_model_files wf = pe.Workflow(name=wf_name) - inputnode = pe.Node(util.IdentityInterface(fields=['ftest', - 'grp_model', - 'model_name'], - mandatory_inputs=True), - name='inputspec') - - selectmodel = pe.Node(function.Function(input_names=['model', - 'ftest', - 'model_name'], - output_names=['fts_file', - 'con_file', - 'grp_file', - 'mat_file'], - function=select_model_files, - as_module=True), - name='selectnode') - - wf.connect(inputnode, 'ftest', - selectmodel, 'ftest') - wf.connect(inputnode, 'grp_model', - selectmodel, 'model') - wf.connect(inputnode, 'model_name', selectmodel, 'model_name') - - outputnode = pe.Node(util.IdentityInterface(fields=['fts', - 'grp', - 'mat', - 'con'], - mandatory_inputs=True), - name='outputspec') - - wf.connect(selectmodel, 'mat_file', - outputnode, 'mat') - wf.connect(selectmodel, 'grp_file', - outputnode, 'grp') - wf.connect(selectmodel, 'fts_file', - outputnode, 'fts') - wf.connect(selectmodel, 'con_file', - outputnode, 'con') + inputnode = pe.Node( + util.IdentityInterface( + fields=["ftest", 
"grp_model", "model_name"], mandatory_inputs=True + ), + name="inputspec", + ) + + selectmodel = pe.Node( + function.Function( + input_names=["model", "ftest", "model_name"], + output_names=["fts_file", "con_file", "grp_file", "mat_file"], + function=select_model_files, + as_module=True, + ), + name="selectnode", + ) + + wf.connect(inputnode, "ftest", selectmodel, "ftest") + wf.connect(inputnode, "grp_model", selectmodel, "model") + wf.connect(inputnode, "model_name", selectmodel, "model_name") + + outputnode = pe.Node( + util.IdentityInterface( + fields=["fts", "grp", "mat", "con"], mandatory_inputs=True + ), + name="outputspec", + ) + + wf.connect(selectmodel, "mat_file", outputnode, "mat") + wf.connect(selectmodel, "grp_file", outputnode, "grp") + wf.connect(selectmodel, "fts_file", outputnode, "fts") + wf.connect(selectmodel, "con_file", outputnode, "con") return wf def resample_func_roi(in_func, in_roi, realignment, identity_matrix): import os + import nibabel as nb + from CPAC.utils.monitoring.custom_logging import log_subprocess # load func and ROI dimension @@ -1306,32 +1470,46 @@ def resample_func_roi(in_func, in_roi, realignment, identity_matrix): # check if func size = ROI size, return func and ROI; else resample using flirt if roi_shape != func_shape: - # resample func to ROI: in_file = func, reference = ROI - if 'func_to_ROI' in realignment: + if "func_to_ROI" in realignment: in_file = in_func reference = in_roi - out_file = os.path.join(os.getcwd(), in_file[in_file.rindex( - '/') + 1:in_file.rindex('.nii')] + '_resampled.nii.gz') + out_file = os.path.join( + os.getcwd(), + in_file[in_file.rindex("/") + 1 : in_file.rindex(".nii")] + + "_resampled.nii.gz", + ) out_func = out_file out_roi = in_roi - interp = 'trilinear' + interp = "trilinear" # resample ROI to func: in_file = ROI, reference = func - elif 'ROI_to_func' in realignment: + elif "ROI_to_func" in realignment: in_file = in_roi reference = in_func - out_file = os.path.join(os.getcwd(), in_file[in_file.rindex( - '/') + 1:in_file.rindex('.nii')] + '_resampled.nii.gz') + out_file = os.path.join( + os.getcwd(), + in_file[in_file.rindex("/") + 1 : in_file.rindex(".nii")] + + "_resampled.nii.gz", + ) out_func = in_func out_roi = out_file - interp = 'nearestneighbour' - - cmd = ['flirt', '-in', in_file, - '-ref', reference, - '-out', out_file, - '-interp', interp, - '-applyxfm', '-init', identity_matrix] + interp = "nearestneighbour" + + cmd = [ + "flirt", + "-in", + in_file, + "-ref", + reference, + "-out", + out_file, + "-interp", + interp, + "-applyxfm", + "-init", + identity_matrix, + ] log_subprocess(cmd) else: diff --git a/CPAC/utils/test_mocks.py b/CPAC/utils/test_mocks.py index f2a0a6aafb..084f299c0a 100644 --- a/CPAC/utils/test_mocks.py +++ b/CPAC/utils/test_mocks.py @@ -1,5 +1,7 @@ import os + from nipype.interfaces import utility as util + from CPAC.pipeline import nipype_pipeline_engine as pe from CPAC.utils.configuration import Configuration from CPAC.utils.datasource import resolve_resolution @@ -9,206 +11,233 @@ def file_node(path, file_node_num=0): input_node = pe.Node( - util.IdentityInterface(fields=['file']), name='file_node_{0}'.format( - file_node_num) + util.IdentityInterface(fields=["file"]), name=f"file_node_{file_node_num}" ) input_node.inputs.file = path - return input_node, 'file' + return input_node, "file" -def configuration_strategy_mock(method='FSL'): - fsldir = os.environ.get('FSLDIR') +def configuration_strategy_mock(method="FSL"): + fsldir = os.environ.get("FSLDIR") # mock the config 
dictionary - c = Configuration({ - "pipeline_setup": { - "output_directory": { - "path": "/output/output/pipeline_analysis_nuisance/" - "sub-M10978008_ses-NFB3" - }, - "working_directory": { - "path": "/scratch/pipeline_tests" - }, - "system_config": { - "num_ants_threads": 4 - } - }, - "registration_workflows": { - "functional_registration": { - "EPI_registration": { - "FSL-FNIRT": { - "identity_matrix": f"{fsldir}/etc/flirtsch/" - "ident.mat", - "interpolation": "sinc" - } + c = Configuration( + { + "pipeline_setup": { + "output_directory": { + "path": "/output/output/pipeline_analysis_nuisance/" + "sub-M10978008_ses-NFB3" }, - "func_registration_to_template": { - "ANTs_pipelines": { - "interpolation": "LanczosWindowedSinc" + "working_directory": {"path": "/scratch/pipeline_tests"}, + "system_config": {"num_ants_threads": 4}, + }, + "registration_workflows": { + "functional_registration": { + "EPI_registration": { + "FSL-FNIRT": { + "identity_matrix": f"{fsldir}/etc/flirtsch/" "ident.mat", + "interpolation": "sinc", + } }, - "output_resolution": { - "func_preproc_outputs": "3mm", - "func_derivative_outputs": "3mm" + "func_registration_to_template": { + "ANTs_pipelines": {"interpolation": "LanczosWindowedSinc"}, + "output_resolution": { + "func_preproc_outputs": "3mm", + "func_derivative_outputs": "3mm", + }, + "target_template": { + "T1_template": { + "T1w_template_for_resample": f"{fsldir}/" + "data/standard/" + "MNI152_T1_1mm_brain." + "nii.gz", + "T1w_brain_template_funcreg": f"{fsldir}/" + "data/standard/" + "MNI152_T1_" + "${resolution_for_" + "func_preproc}_" + "brain.nii.gz", + "T1w_template_funcreg": f"{fsldir}/data/" + "standard/MNI152_T1_" + "${resolution_for_func_" + "preproc}.nii.gz", + } + }, }, - "target_template": { - "T1_template": { - "T1w_template_for_resample": f"{fsldir}/" - "data/standard/" - "MNI152_T1_1mm_brain." 
- "nii.gz", - "T1w_brain_template_funcreg": f"{fsldir}/" - "data/standard/" - "MNI152_T1_" - "${resolution_for_" - "func_preproc}_" - "brain.nii.gz", - "T1w_template_funcreg": f"{fsldir}/data/" - "standard/MNI152_T1_" - "${resolution_for_func_" - "preproc}.nii.gz" - } - } } - } - }, - "post_processing": { - "spatial_smoothing": { - "fwhm": [2, 3, 4] - } + }, + "post_processing": {"spatial_smoothing": {"fwhm": [2, 3, 4]}}, } - }) + ) - if method == 'ANTS': - c.update('regOption', 'ANTS') + if method == "ANTS": + c.update("regOption", "ANTS") else: - c.update('regOption', 'FSL') + c.update("regOption", "FSL") # mock the strategy strat = Strategy() resource_dict = { - "functional_nuisance_residuals": os.path.join( - c['pipeline_setup', 'output_directory', 'path'], - "motion_correct/_scan_test/" - "sub-M10978008_ses-NFB3_task-test_bold_calc_tshift_resample_" - "volreg.nii.gz"), - "mean_functional": os.path.join( - c['pipeline_setup', 'output_directory', 'path'], - "mean_functional/" - "sub-M10978008_ses-NFB3_task-test_bold_calc_tshift_resample_" - "volreg_calc_tstat.nii.gz"), - "functional_brain_mask": os.path.join( - c['pipeline_setup', 'output_directory', 'path'], - "functional_brain_mask/_scan_test/" - "sub-M10978008_ses-NFB3_task-test_bold_calc_tshift_resample_" - "volreg_mask.nii.gz"), - "motion_correct": os.path.join( - c['pipeline_setup', 'output_directory', 'path'], - "motion_correct/_scan_test/" - "sub-M10978008_ses-NFB3_task-test_bold_calc_tshift_resample_" - "volreg.nii.gz"), - "anatomical_brain": os.path.join( - c['pipeline_setup', 'output_directory', 'path'], - "anatomical_brain/" - "sub-M10978008_ses-NFB3_acq-ao_brain_resample.nii.gz"), - "ants_initial_xfm": os.path.join( - c['pipeline_setup', 'output_directory', 'path'], - "ants_initial_xfm/" - "transform0DerivedInitialMovingTranslation.mat"), - "ants_affine_xfm": os.path.join( - c['pipeline_setup', 'output_directory', 'path'], - "ants_affine_xfm/transform2Affine.mat"), - "ants_rigid_xfm": os.path.join( - c['pipeline_setup', 'output_directory', 'path'], - "ants_rigid_xfm/transform1Rigid.mat"), - "anatomical_to_mni_linear_xfm": os.path.join( - c['pipeline_setup', 'output_directory', 'path'], - "anatomical_to_mni_linear_xfm/" - "sub-M10978008_ses-NFB3_T1w_resample_calc_flirt.mat"), - "functional_to_anat_linear_xfm": os.path.join( - c['pipeline_setup', 'output_directory', 'path'], - "functional_to_anat_linear_xfm/_scan_test/" - "sub-M10978008_ses-NFB3_task-test_bold_calc_tshift_resample_" - "volreg_calc_tstat_flirt.mat"), - 'ants_symm_warp_field': os.path.join( - c['pipeline_setup', 'output_directory', 'path'], - "anatomical_to_symmetric_mni_nonlinear_xfm/" - "transform3Warp.nii.gz"), - 'ants_symm_affine_xfm': os.path.join( - c['pipeline_setup', 'output_directory', 'path'], - "ants_symmetric_affine_xfm/transform2Affine.mat"), - 'ants_symm_rigid_xfm': os.path.join( - c['pipeline_setup', 'output_directory', 'path'], - "ants_symmetric_rigid_xfm/transform1Rigid.mat"), - 'ants_symm_initial_xfm': os.path.join( - c['pipeline_setup', 'output_directory', 'path'], - "ants_symmetric_initial_xfm/" - "transform0DerivedInitialMovingTranslation.mat"), - "dr_tempreg_maps_files": [os.path.join( - '/scratch', - 'resting_preproc_sub-M10978008_ses-NFB3_cpac105', - 'temporal_dual_regression_0/_scan_test/' - '_selector_CSF-2mmE-M_aC-WM-2mmE-DPC5_G-M_M-SDB_P-2/' - '_spatial_map_PNAS_Smith09_rsn10_spatial_map_file_' - '..cpac_templates..PNAS_Smith09_rsn10.nii.gz/' - 'split_raw_volumes/temp_reg_map_000{0}.nii.gz'.format(n) - ) for n in range(10)] + 
"functional_nuisance_residuals": os.path.join( + c["pipeline_setup", "output_directory", "path"], + "motion_correct/_scan_test/" + "sub-M10978008_ses-NFB3_task-test_bold_calc_tshift_resample_" + "volreg.nii.gz", + ), + "mean_functional": os.path.join( + c["pipeline_setup", "output_directory", "path"], + "mean_functional/" + "sub-M10978008_ses-NFB3_task-test_bold_calc_tshift_resample_" + "volreg_calc_tstat.nii.gz", + ), + "functional_brain_mask": os.path.join( + c["pipeline_setup", "output_directory", "path"], + "functional_brain_mask/_scan_test/" + "sub-M10978008_ses-NFB3_task-test_bold_calc_tshift_resample_" + "volreg_mask.nii.gz", + ), + "motion_correct": os.path.join( + c["pipeline_setup", "output_directory", "path"], + "motion_correct/_scan_test/" + "sub-M10978008_ses-NFB3_task-test_bold_calc_tshift_resample_" + "volreg.nii.gz", + ), + "anatomical_brain": os.path.join( + c["pipeline_setup", "output_directory", "path"], + "anatomical_brain/" "sub-M10978008_ses-NFB3_acq-ao_brain_resample.nii.gz", + ), + "ants_initial_xfm": os.path.join( + c["pipeline_setup", "output_directory", "path"], + "ants_initial_xfm/" "transform0DerivedInitialMovingTranslation.mat", + ), + "ants_affine_xfm": os.path.join( + c["pipeline_setup", "output_directory", "path"], + "ants_affine_xfm/transform2Affine.mat", + ), + "ants_rigid_xfm": os.path.join( + c["pipeline_setup", "output_directory", "path"], + "ants_rigid_xfm/transform1Rigid.mat", + ), + "anatomical_to_mni_linear_xfm": os.path.join( + c["pipeline_setup", "output_directory", "path"], + "anatomical_to_mni_linear_xfm/" + "sub-M10978008_ses-NFB3_T1w_resample_calc_flirt.mat", + ), + "functional_to_anat_linear_xfm": os.path.join( + c["pipeline_setup", "output_directory", "path"], + "functional_to_anat_linear_xfm/_scan_test/" + "sub-M10978008_ses-NFB3_task-test_bold_calc_tshift_resample_" + "volreg_calc_tstat_flirt.mat", + ), + "ants_symm_warp_field": os.path.join( + c["pipeline_setup", "output_directory", "path"], + "anatomical_to_symmetric_mni_nonlinear_xfm/" "transform3Warp.nii.gz", + ), + "ants_symm_affine_xfm": os.path.join( + c["pipeline_setup", "output_directory", "path"], + "ants_symmetric_affine_xfm/transform2Affine.mat", + ), + "ants_symm_rigid_xfm": os.path.join( + c["pipeline_setup", "output_directory", "path"], + "ants_symmetric_rigid_xfm/transform1Rigid.mat", + ), + "ants_symm_initial_xfm": os.path.join( + c["pipeline_setup", "output_directory", "path"], + "ants_symmetric_initial_xfm/" + "transform0DerivedInitialMovingTranslation.mat", + ), + "dr_tempreg_maps_files": [ + os.path.join( + "/scratch", + "resting_preproc_sub-M10978008_ses-NFB3_cpac105", + "temporal_dual_regression_0/_scan_test/" + "_selector_CSF-2mmE-M_aC-WM-2mmE-DPC5_G-M_M-SDB_P-2/" + "_spatial_map_PNAS_Smith09_rsn10_spatial_map_file_" + "..cpac_templates..PNAS_Smith09_rsn10.nii.gz/" + f"split_raw_volumes/temp_reg_map_000{n}.nii.gz", + ) + for n in range(10) + ], } - if method == 'ANTS': + if method == "ANTS": resource_dict["anatomical_to_mni_nonlinear_xfm"] = os.path.join( - c['pipeline_setup', 'output_directory', 'path'], - "anatomical_to_mni_nonlinear_xfm/transform3Warp.nii.gz") + c["pipeline_setup", "output_directory", "path"], + "anatomical_to_mni_nonlinear_xfm/transform3Warp.nii.gz", + ) else: resource_dict["anatomical_to_mni_nonlinear_xfm"] = os.path.join( - c['pipeline_setup', 'output_directory', 'path'], + c["pipeline_setup", "output_directory", "path"], "anatomical_to_mni_nonlinear_xfm/" - "sub-M10978008_ses-NFB3_T1w_resample_fieldwarp.nii.gz") + 
"sub-M10978008_ses-NFB3_T1w_resample_fieldwarp.nii.gz", + ) file_node_num = 0 for resource, filepath in resource_dict.items(): - strat.update_resource_pool({ - resource: file_node(filepath, file_node_num) - }) - strat.append_name(resource+'_0') + strat.update_resource_pool({resource: file_node(filepath, file_node_num)}) + strat.append_name(resource + "_0") file_node_num += 1 templates_for_resampling = [ - (c['registration_workflows', 'functional_registration', - 'func_registration_to_template', 'output_resolution', - 'func_preproc_outputs'], - c['registration_workflows', 'functional_registration', - 'func_registration_to_template', 'target_template', 'T1_template', - 'T1w_brain_template_funcreg'], - 'template_brain_for_func_preproc', - 'resolution_for_func_preproc'), - (c['registration_workflows', 'functional_registration', - 'func_registration_to_template', 'output_resolution', - 'func_preproc_outputs'], - c['registration_workflows', 'functional_registration', - 'func_registration_to_template', 'target_template', 'T1_template', - 'T1w_brain_template_funcreg'], - 'template_skull_for_func_preproc', - 'resolution_for_func_preproc') + ( + c[ + "registration_workflows", + "functional_registration", + "func_registration_to_template", + "output_resolution", + "func_preproc_outputs", + ], + c[ + "registration_workflows", + "functional_registration", + "func_registration_to_template", + "target_template", + "T1_template", + "T1w_brain_template_funcreg", + ], + "template_brain_for_func_preproc", + "resolution_for_func_preproc", + ), + ( + c[ + "registration_workflows", + "functional_registration", + "func_registration_to_template", + "output_resolution", + "func_preproc_outputs", + ], + c[ + "registration_workflows", + "functional_registration", + "func_registration_to_template", + "target_template", + "T1_template", + "T1w_brain_template_funcreg", + ], + "template_skull_for_func_preproc", + "resolution_for_func_preproc", + ), ] for resolution, template, template_name, tag in templates_for_resampling: - resampled_template = pe.Node(Function(input_names=[ - 'resolution', 'template', 'template_name', 'tag' - ], - output_names=[ - 'resampled_template' - ], - function=resolve_resolution, - as_module=True), - name='resampled_' + template_name) + resampled_template = pe.Node( + Function( + input_names=["resolution", "template", "template_name", "tag"], + output_names=["resampled_template"], + function=resolve_resolution, + as_module=True, + ), + name="resampled_" + template_name, + ) resampled_template.inputs.resolution = resolution resampled_template.inputs.template = template resampled_template.inputs.template_name = template_name resampled_template.inputs.tag = tag - strat.update_resource_pool({ - template_name: (resampled_template, 'resampled_template')}) - strat.append_name('resampled_template_0') + strat.update_resource_pool( + {template_name: (resampled_template, "resampled_template")} + ) + strat.append_name("resampled_template_0") return c, strat diff --git a/dev/docker_data/run.py b/dev/docker_data/run.py index acdbbfbb02..71b970dac0 100755 --- a/dev/docker_data/run.py +++ b/dev/docker_data/run.py @@ -18,44 +18,51 @@ import argparse import datetime import os +import shutil import subprocess import sys import time -import shutil from warnings import simplefilter -from nipype import logging + import yaml -from CPAC import license_notice, __version__ +from nipype import logging + +from CPAC import __version__, license_notice from CPAC.pipeline import AVAILABLE_PIPELINE_CONFIGS from 
CPAC.pipeline.random_state import set_up_random_state from CPAC.pipeline.schema import str_to_bool1_1 -from CPAC.utils.bids_utils import cl_strip_brackets, \ - create_cpac_data_config, \ - load_cpac_data_config, \ - load_yaml_config, \ - sub_list_filter_by_labels +from CPAC.utils.bids_utils import ( + cl_strip_brackets, + create_cpac_data_config, + load_cpac_data_config, + load_yaml_config, + sub_list_filter_by_labels, +) from CPAC.utils.configuration import Configuration, preconfig_yaml, set_subject +from CPAC.utils.configuration.yaml_template import ( + create_yaml_from_template, + hash_data_config, + upgrade_pipeline_to_1_8, +) from CPAC.utils.docs import DOCS_URL_PREFIX from CPAC.utils.monitoring import failed_to_start, log_nodes_cb -from CPAC.utils.configuration.yaml_template import create_yaml_from_template, \ - hash_data_config, \ - upgrade_pipeline_to_1_8 from CPAC.utils.utils import update_nested_dict -simplefilter(action='ignore', category=FutureWarning) -logger = logging.getLogger('nipype.workflow') + +simplefilter(action="ignore", category=FutureWarning) +logger = logging.getLogger("nipype.workflow") DEFAULT_TMP_DIR = "/tmp" def run(command, env=None): if env is None: env = {} - process = subprocess.Popen(command, stdout=subprocess.PIPE, - stderr=subprocess.STDOUT, - shell=True, env=env) + process = subprocess.Popen( + command, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, shell=True, env=env + ) while True: line = process.stdout.readline() line = line.decode()[:-1] - if line == '' and process.poll() is not None: + if line == "" and process.poll() is not None: break @@ -71,324 +78,413 @@ def parse_yaml(value): def resolve_aws_credential(source): - if source == "env": from urllib.request import urlopen + aws_creds_address = "169.254.170.2{}".format( os.environ["AWS_CONTAINER_CREDENTIALS_RELATIVE_URI"] ) aws_creds = urlopen(aws_creds_address).read() - aws_input_creds = "/tmp/aws_input_creds_%d.csv" % int( - round(time.time() * 1000) - ) + aws_input_creds = "/tmp/aws_input_creds_%d.csv" % int(round(time.time() * 1000)) with open(aws_input_creds) as ofd: for key, vname in [ ("AccessKeyId", "AWSAcessKeyId"), - ("SecretAccessKey", "AWSSecretKey") + ("SecretAccessKey", "AWSSecretKey"), ]: - ofd.write("{0}={1}".format(vname, aws_creds[key])) + ofd.write(f"{vname}={aws_creds[key]}") return aws_input_creds if os.path.isfile(source): return source else: - raise IOError( - "Could not find aws credentials {0}" - .format(source) - ) + raise IOError(f"Could not find aws credentials {source}") def run_main(): """Run this function if not importing as a script""" - parser = argparse.ArgumentParser(description='C-PAC Pipeline Runner. ' + - license_notice) - parser.add_argument('bids_dir', - help='The directory with the input dataset ' - 'formatted according to the BIDS standard. ' - 'Use the format s3://bucket/path/to/bidsdir to ' - 'read data directly from an S3 bucket. This may ' - 'require AWS S3 credentials specified via the ' - '--aws_input_creds option.') - parser.add_argument('output_dir', - help='The directory where the output files should be ' - 'stored. If you are running group level analysis ' - 'this folder should be prepopulated with the ' - 'results of the participant level analysis. Use ' - 'the format s3://bucket/path/to/bidsdir to ' - 'write data directly to an S3 bucket. This may ' - 'require AWS S3 credentials specified via the ' - '--aws_output_creds option.') - parser.add_argument('analysis_level', - help='Level of the analysis that will be performed. 
' - 'Multiple participant level analyses can be run ' - 'independently (in parallel) using the same ' - 'output_dir. test_config will run through the ' - 'entire configuration process but will not ' - 'execute the pipeline.', - choices=['participant', 'group', 'test_config', 'cli'], - type=lambda choice: choice.replace('-', '_').lower()) - - parser.add_argument('--pipeline-file', '--pipeline_file', - help='Path for the pipeline configuration file to ' - 'use. Use the format s3://bucket/path/to/' - 'pipeline_file to read data directly from an ' - 'S3 bucket. This may require AWS S3 credentials ' - 'specified via the --aws_input_creds option.', - default=preconfig_yaml('default')) - parser.add_argument('--group-file', '--group_file', - help='Path for the group analysis configuration file ' - 'to use. Use the format s3://bucket/path/to/' - 'pipeline_file to read data directly from an S3 ' - 'bucket. This may require AWS S3 credentials ' - 'specified via the --aws_input_creds option. ' - 'The output directory needs to refer to the ' - 'output of a preprocessing individual pipeline.', - default=None) - parser.add_argument('--data-config-file', '--data_config_file', - help='Yaml file containing the location of the data ' - 'that is to be processed. This file is not ' - 'necessary if the data in bids_dir is organized ' - 'according to the BIDS format. This enables ' - 'support for legacy data organization and cloud ' - 'based storage. A bids_dir must still be ' - 'specified when using this option, but its ' - 'value will be ignored. Use the format s3://' - 'bucket/path/to/data_config_file to read data ' - 'directly from an S3 bucket. This may require ' - 'AWS S3 credentials specified via the ' - '--aws_input_creds option.', - default=None) - - parser.add_argument('--preconfig', - help='Name of the preconfigured pipeline to run. ' - 'Available preconfigured pipelines: ' + - str(AVAILABLE_PIPELINE_CONFIGS) + '. See ' - f'{DOCS_URL_PREFIX}/user/pipelines/preconfig ' - 'for more information about the preconfigured ' - 'pipelines.', - default=None) - if [_ for _ in ['--pipeline-override', - '--pipeline_override']if _ in sys.argv]: # secret option - parser.add_argument('--pipeline-override', '--pipeline_override', - type=parse_yaml, action='append', - help='Override specific options from the ' - 'pipeline configuration. E.g.: ' - '"{\'pipeline_setup\': {\'system_config\': ' - '{\'maximum_memory_per_participant\': 1}}}"') - - parser.add_argument('--aws-input-creds', '--aws_input_creds', - help='Credentials for reading from S3. If not ' - 'provided and s3 paths are specified in the ' - 'data config we will try to access the bucket ' - 'anonymously use the string "env" to indicate ' - 'that input credentials should read from the ' - 'environment. (E.g. when using AWS iam roles).', - default=None) - parser.add_argument('--aws-output-creds', '--aws_output_creds', - help='Credentials for writing to S3. If not provided ' - 'and s3 paths are specified in the output ' - 'directory we will try to access the bucket ' - 'anonymously use the string "env" to indicate ' - 'that output credentials should read from the ' - 'environment. (E.g. when using AWS iam roles).', - default=None) + parser = argparse.ArgumentParser( + description="C-PAC Pipeline Runner. " + license_notice + ) + parser.add_argument( + "bids_dir", + help="The directory with the input dataset " + "formatted according to the BIDS standard. " + "Use the format s3://bucket/path/to/bidsdir to " + "read data directly from an S3 bucket. 
This may " + "require AWS S3 credentials specified via the " + "--aws_input_creds option.", + ) + parser.add_argument( + "output_dir", + help="The directory where the output files should be " + "stored. If you are running group level analysis " + "this folder should be prepopulated with the " + "results of the participant level analysis. Use " + "the format s3://bucket/path/to/bidsdir to " + "write data directly to an S3 bucket. This may " + "require AWS S3 credentials specified via the " + "--aws_output_creds option.", + ) + parser.add_argument( + "analysis_level", + help="Level of the analysis that will be performed. " + "Multiple participant level analyses can be run " + "independently (in parallel) using the same " + "output_dir. test_config will run through the " + "entire configuration process but will not " + "execute the pipeline.", + choices=["participant", "group", "test_config", "cli"], + type=lambda choice: choice.replace("-", "_").lower(), + ) + + parser.add_argument( + "--pipeline-file", + "--pipeline_file", + help="Path for the pipeline configuration file to " + "use. Use the format s3://bucket/path/to/" + "pipeline_file to read data directly from an " + "S3 bucket. This may require AWS S3 credentials " + "specified via the --aws_input_creds option.", + default=preconfig_yaml("default"), + ) + parser.add_argument( + "--group-file", + "--group_file", + help="Path for the group analysis configuration file " + "to use. Use the format s3://bucket/path/to/" + "pipeline_file to read data directly from an S3 " + "bucket. This may require AWS S3 credentials " + "specified via the --aws_input_creds option. " + "The output directory needs to refer to the " + "output of a preprocessing individual pipeline.", + default=None, + ) + parser.add_argument( + "--data-config-file", + "--data_config_file", + help="Yaml file containing the location of the data " + "that is to be processed. This file is not " + "necessary if the data in bids_dir is organized " + "according to the BIDS format. This enables " + "support for legacy data organization and cloud " + "based storage. A bids_dir must still be " + "specified when using this option, but its " + "value will be ignored. Use the format s3://" + "bucket/path/to/data_config_file to read data " + "directly from an S3 bucket. This may require " + "AWS S3 credentials specified via the " + "--aws_input_creds option.", + default=None, + ) + + parser.add_argument( + "--preconfig", + help="Name of the preconfigured pipeline to run. " + "Available preconfigured pipelines: " + + str(AVAILABLE_PIPELINE_CONFIGS) + + ". See " + f"{DOCS_URL_PREFIX}/user/pipelines/preconfig " + "for more information about the preconfigured " + "pipelines.", + default=None, + ) + if [ + _ for _ in ["--pipeline-override", "--pipeline_override"] if _ in sys.argv + ]: # secret option + parser.add_argument( + "--pipeline-override", + "--pipeline_override", + type=parse_yaml, + action="append", + help="Override specific options from the " + "pipeline configuration. E.g.: " + "\"{'pipeline_setup': {'system_config': " + "{'maximum_memory_per_participant': 1}}}\"", + ) + + parser.add_argument( + "--aws-input-creds", + "--aws_input_creds", + help="Credentials for reading from S3. If not " + "provided and s3 paths are specified in the " + "data config we will try to access the bucket " + 'anonymously use the string "env" to indicate ' + "that input credentials should read from the " + "environment. (E.g. 
when using AWS iam roles).", + default=None, + ) + parser.add_argument( + "--aws-output-creds", + "--aws_output_creds", + help="Credentials for writing to S3. If not provided " + "and s3 paths are specified in the output " + "directory we will try to access the bucket " + 'anonymously use the string "env" to indicate ' + "that output credentials should read from the " + "environment. (E.g. when using AWS iam roles).", + default=None, + ) # TODO: restore for <--n_cpus> once we remove # from config file # - parser.add_argument('--n-cpus', '--n_cpus', type=int, default=0, - help='Number of execution resources per participant ' - 'available for the pipeline. This flag takes ' - 'precidence over max_cores_per_participant in ' - 'the pipeline configuration file.') - parser.add_argument('--mem-mb', '--mem_mb', type=float, - help='Amount of RAM available per participant in ' - 'megabytes. Included for compatibility with ' - 'BIDS-Apps standard, but mem_gb is preferred. ' - 'This flag takes precedence over ' - 'maximum_memory_per_participant in the pipeline ' - 'configuration file.') - parser.add_argument('--mem-gb', '--mem_gb', type=float, - help='Amount of RAM available per participant in ' - 'gigabytes. If this is specified along with ' - 'mem_mb, this flag will take precedence. This ' - 'flag also takes precedence over ' - 'maximum_memory_per_participant in the pipeline ' - 'configuration file.') - parser.add_argument('--runtime-usage', '--runtime_usage', type=str, - help='Path to a callback.log from a prior run of the ' - 'same pipeline configuration (including any ' - 'resource-management parameters that will be ' - "applied in this run, like 'n_cpus' and " - "'num_ants_threads'). This log will be used to " - 'override per-node memory estimates with ' - 'observed values plus a buffer.') - parser.add_argument('--runtime-buffer', '--runtime_buffer', type=float, - help='Buffer to add to per-node memory estimates if ' - '--runtime_usage is specified. This number is a ' - 'percentage of the observed memory usage.') - parser.add_argument('--num-ants-threads', '--num_ants_threads', type=int, - default=0, - help='The number of cores to allocate to ANTS-' - 'based anatomical registration per ' - 'participant. Multiple cores can greatly ' - 'speed up this preprocessing step. This ' - 'number cannot be greater than the number of ' - 'cores per participant.') - parser.add_argument('--random-seed', '--random_seed', type=str, - help='Random seed used to fix the state of execution. ' - 'If unset, each process uses its own default. If ' - 'set, a `random.log` file will be generated ' - 'logging the random state used by each process. ' - 'If set to a positive integer (up to 2147483647' - '), that integer will be used to seed each ' - 'process. If set to \'random\', a random seed ' - 'will be generated and recorded for each ' - 'process.') - parser.add_argument('--save-working-dir', '--save_working_dir', nargs='?', - help='Save the contents of the working directory.', - default=False) - parser.add_argument('--fail-fast', '--fail_fast', type=str.title, - help='Stop worklow execution on first crash?') - parser.add_argument('--participant-label', '--participant_label', - help='The label of the participant that should be ' - 'analyzed. The label corresponds to ' - 'sub- from the BIDS spec ' - '(so it does not include "sub-"). If this ' - 'parameter is not provided all participants ' - 'should be analyzed. 
Multiple participants ' - 'can be specified with a space separated ' - 'list.', - nargs="+") - parser.add_argument('--participant-ndx', '--participant_ndx', - help='The index of the participant that should be ' - 'analyzed. This corresponds to the index of ' - 'the participant in the data config file. ' - 'This was added to make it easier to ' - 'accommodate SGE array jobs. Only a single ' - 'participant will be analyzed. Can be used ' - 'with participant label, in which case it is ' - 'the index into the list that follows the ' - 'participant_label flag. Use the value "-1" ' - 'to indicate that the participant index ' - 'should be read from the ' - 'AWS_BATCH_JOB_ARRAY_INDEX environment ' - 'variable.', - default=None, type=int) - - parser.add_argument('--T1w-label', '--T1w_label', - help='C-PAC only runs one T1w per participant-' - 'session at a time, at this time. Use this ' - 'flag to specify any BIDS entity (e.g., "acq-' - 'VNavNorm") or sequence of BIDS entities (' - 'e.g., "acq-VNavNorm_run-1") to specify ' - 'which of multiple T1w files to use. Specify ' - '"--T1w_label T1w" to choose the T1w file ' - 'with the fewest BIDS entities (i.e., the ' - 'final option of [*_acq-VNavNorm_T1w.nii.gz, ' - '*_acq-HCP_T1w.nii.gz, *_T1w.nii.gz"]). ' - 'C-PAC will choose the first T1w it finds if ' - 'the user does not provide this flag, or ' - 'if multiple T1w files match the --T1w_label ' - 'provided.\nIf multiple T2w files are present ' - 'and a comparable filter is possible, T2w ' - 'files will be filtered as well. If no T2w files ' - 'match this --T1w_label, T2w files will be ' - 'processed as if no --T1w_label were provided.') - parser.add_argument('--bold-label', '--bold_label', - help='To include a specified subset of available ' - 'BOLD files, use this flag to specify any ' - 'BIDS entity (e.g., "task-rest") or sequence ' - 'of BIDS entities (e.g. "task-rest_run-1"). ' - 'To specify the bold file with the fewest ' - 'BIDS entities in the file name, specify ' - '"--bold_label bold". Multiple `--bold_' - 'label`s can be specified with a space-' - 'separated list. If multiple `--bold_label`s ' - 'are provided (e.g., "--bold_label task-rest_' - 'run-1 task-rest_run-2", each scan that ' - 'includes all BIDS entities specified in any ' - 'of the provided `--bold_label`s will be ' - 'analyzed. If this parameter is not provided ' - 'all BOLD scans should be analyzed.', - nargs="+") - - parser.add_argument('-v', '--version', action='version', - version=f'C-PAC BIDS-App version {__version__}') - parser.add_argument('--bids-validator-config', '--bids_validator_config', - help='JSON file specifying configuration of ' - 'bids-validator: See https://github.com/bids-' - 'standard/bids-validator for more info.') - parser.add_argument('--skip-bids-validator', '--skip_bids_validator', - help='Skips bids validation.', - action='store_true') - - parser.add_argument('--anat-only', '--anat_only', - help='run only the anatomical preprocessing', - action='store_true') - - parser.add_argument('--user_defined', type=str, - help='Arbitrary user defined string that will be ' - 'included in every output sidecar file.') - - parser.add_argument('--tracking-opt-out', '--tracking_opt-out', - action='store_true', - help='Disable usage tracking. Only the number of ' - 'participants on the analysis is tracked.', - default=False) - - parser.add_argument('--monitoring', - help='Enable monitoring server on port 8080. 
You '
- 'need to bind the port using the Docker '
- 'flag "-p".',
- action='store_true')
-
- parser.add_argument('--freesurfer_dir', '--freesurfer-dir',
- help='Specify path to pre-computed FreeSurfer outputs '
- 'to pull into C-PAC run',
- default=False)
+ parser.add_argument(
+ "--n-cpus",
+ "--n_cpus",
+ type=int,
+ default=0,
+ help="Number of execution resources per participant "
+ "available for the pipeline. This flag takes "
+ "precedence over max_cores_per_participant in "
+ "the pipeline configuration file.",
+ )
+ parser.add_argument(
+ "--mem-mb",
+ "--mem_mb",
+ type=float,
+ help="Amount of RAM available per participant in "
+ "megabytes. Included for compatibility with "
+ "BIDS-Apps standard, but mem_gb is preferred. "
+ "This flag takes precedence over "
+ "maximum_memory_per_participant in the pipeline "
+ "configuration file.",
+ )
+ parser.add_argument(
+ "--mem-gb",
+ "--mem_gb",
+ type=float,
+ help="Amount of RAM available per participant in "
+ "gigabytes. If this is specified along with "
+ "mem_mb, this flag will take precedence. This "
+ "flag also takes precedence over "
+ "maximum_memory_per_participant in the pipeline "
+ "configuration file.",
+ )
+ parser.add_argument(
+ "--runtime-usage",
+ "--runtime_usage",
+ type=str,
+ help="Path to a callback.log from a prior run of the "
+ "same pipeline configuration (including any "
+ "resource-management parameters that will be "
+ "applied in this run, like 'n_cpus' and "
+ "'num_ants_threads'). This log will be used to "
+ "override per-node memory estimates with "
+ "observed values plus a buffer.",
+ )
+ parser.add_argument(
+ "--runtime-buffer",
+ "--runtime_buffer",
+ type=float,
+ help="Buffer to add to per-node memory estimates if "
+ "--runtime_usage is specified. This number is a "
+ "percentage of the observed memory usage.",
+ )
+ parser.add_argument(
+ "--num-ants-threads",
+ "--num_ants_threads",
+ type=int,
+ default=0,
+ help="The number of cores to allocate to ANTS-"
+ "based anatomical registration per "
+ "participant. Multiple cores can greatly "
+ "speed up this preprocessing step. This "
+ "number cannot be greater than the number of "
+ "cores per participant.",
+ )
+ parser.add_argument(
+ "--random-seed",
+ "--random_seed",
+ type=str,
+ help="Random seed used to fix the state of execution. "
+ "If unset, each process uses its own default. If "
+ "set, a `random.log` file will be generated "
+ "logging the random state used by each process. "
+ "If set to a positive integer (up to 2147483647"
+ "), that integer will be used to seed each "
+ "process. If set to 'random', a random seed "
+ "will be generated and recorded for each "
+ "process.",
+ )
+ parser.add_argument(
+ "--save-working-dir",
+ "--save_working_dir",
+ nargs="?",
+ help="Save the contents of the working directory.",
+ default=False,
+ )
+ parser.add_argument(
+ "--fail-fast",
+ "--fail_fast",
+ type=str.title,
+ help="Stop workflow execution on first crash?",
+ )
+ parser.add_argument(
+ "--participant-label",
+ "--participant_label",
+ help="The label of the participant that should be "
+ "analyzed. The label corresponds to "
+ "sub-<participant_label> from the BIDS spec "
+ '(so it does not include "sub-"). If this '
+ "parameter is not provided all participants "
+ "should be analyzed. Multiple participants "
+ "can be specified with a space separated "
+ "list.",
+ nargs="+",
+ )
+ parser.add_argument(
+ "--participant-ndx",
+ "--participant_ndx",
+ help="The index of the participant that should be "
+ "analyzed. 
This corresponds to the index of " + "the participant in the data config file. " + "This was added to make it easier to " + "accommodate SGE array jobs. Only a single " + "participant will be analyzed. Can be used " + "with participant label, in which case it is " + "the index into the list that follows the " + 'participant_label flag. Use the value "-1" ' + "to indicate that the participant index " + "should be read from the " + "AWS_BATCH_JOB_ARRAY_INDEX environment " + "variable.", + default=None, + type=int, + ) + + parser.add_argument( + "--T1w-label", + "--T1w_label", + help="C-PAC only runs one T1w per participant-" + "session at a time, at this time. Use this " + 'flag to specify any BIDS entity (e.g., "acq-' + 'VNavNorm") or sequence of BIDS entities (' + 'e.g., "acq-VNavNorm_run-1") to specify ' + "which of multiple T1w files to use. Specify " + '"--T1w_label T1w" to choose the T1w file ' + "with the fewest BIDS entities (i.e., the " + "final option of [*_acq-VNavNorm_T1w.nii.gz, " + '*_acq-HCP_T1w.nii.gz, *_T1w.nii.gz"]). ' + "C-PAC will choose the first T1w it finds if " + "the user does not provide this flag, or " + "if multiple T1w files match the --T1w_label " + "provided.\nIf multiple T2w files are present " + "and a comparable filter is possible, T2w " + "files will be filtered as well. If no T2w files " + "match this --T1w_label, T2w files will be " + "processed as if no --T1w_label were provided.", + ) + parser.add_argument( + "--bold-label", + "--bold_label", + help="To include a specified subset of available " + "BOLD files, use this flag to specify any " + 'BIDS entity (e.g., "task-rest") or sequence ' + 'of BIDS entities (e.g. "task-rest_run-1"). ' + "To specify the bold file with the fewest " + "BIDS entities in the file name, specify " + '"--bold_label bold". Multiple `--bold_' + "label`s can be specified with a space-" + "separated list. If multiple `--bold_label`s " + 'are provided (e.g., "--bold_label task-rest_' + 'run-1 task-rest_run-2", each scan that ' + "includes all BIDS entities specified in any " + "of the provided `--bold_label`s will be " + "analyzed. If this parameter is not provided " + "all BOLD scans should be analyzed.", + nargs="+", + ) + + parser.add_argument( + "-v", + "--version", + action="version", + version=f"C-PAC BIDS-App version {__version__}", + ) + parser.add_argument( + "--bids-validator-config", + "--bids_validator_config", + help="JSON file specifying configuration of " + "bids-validator: See https://github.com/bids-" + "standard/bids-validator for more info.", + ) + parser.add_argument( + "--skip-bids-validator", + "--skip_bids_validator", + help="Skips bids validation.", + action="store_true", + ) + + parser.add_argument( + "--anat-only", + "--anat_only", + help="run only the anatomical preprocessing", + action="store_true", + ) + + parser.add_argument( + "--user_defined", + type=str, + help="Arbitrary user defined string that will be " + "included in every output sidecar file.", + ) + + parser.add_argument( + "--tracking-opt-out", + "--tracking_opt-out", + action="store_true", + help="Disable usage tracking. Only the number of " + "participants on the analysis is tracked.", + default=False, + ) + + parser.add_argument( + "--monitoring", + help="Enable monitoring server on port 8080. 
You " + "need to bind the port using the Docker " + 'flag "-p".', + action="store_true", + ) + + parser.add_argument( + "--freesurfer_dir", + "--freesurfer-dir", + help="Specify path to pre-computed FreeSurfer outputs " + "to pull into C-PAC run", + default=False, + ) # get the command line arguments args = parser.parse_args( - sys.argv[ - 1:( - sys.argv.index('--') - if '--' in sys.argv - else len(sys.argv) - ) - ] + sys.argv[1 : (sys.argv.index("--") if "--" in sys.argv else len(sys.argv))] ) bids_dir_is_s3 = args.bids_dir.lower().startswith("s3://") - bids_dir = args.bids_dir if bids_dir_is_s3 else os.path.realpath( - args.bids_dir) + bids_dir = args.bids_dir if bids_dir_is_s3 else os.path.realpath(args.bids_dir) output_dir_is_s3 = args.output_dir.lower().startswith("s3://") - output_dir = args.output_dir if output_dir_is_s3 else os.path.realpath( - args.output_dir) + output_dir = ( + args.output_dir if output_dir_is_s3 else os.path.realpath(args.output_dir) + ) exitcode = 0 if args.analysis_level == "cli": from CPAC.__main__ import main - main.main(args=sys.argv[sys.argv.index('--') + 1:]) + + main.main(args=sys.argv[sys.argv.index("--") + 1 :]) sys.exit(0) elif args.analysis_level == "group": if not args.group_file or not os.path.exists(args.group_file): - print() print("No group analysis configuration file was supplied.") print() import pkg_resources as p - args.group_file = \ - p.resource_filename( - "CPAC", - os.path.join( - "resources", - "configs", - "group_config_template.yml" - ) - ) + + args.group_file = p.resource_filename( + "CPAC", + os.path.join("resources", "configs", "group_config_template.yml"), + ) output_group = os.path.join(output_dir, "group_config.yml") @@ -400,8 +496,10 @@ def run_main(): shutil.copyfile(args.group_file, output_group) except (Exception, IOError): print("Could not create group analysis configuration file.") - print("Please refer to the C-PAC documentation for group " - "analysis setup.") + print( + "Please refer to the C-PAC documentation for group " + "analysis setup." + ) print() else: print( @@ -409,38 +507,37 @@ def run_main(): "the file and, after customizing to your analysis, add " "the flag" "\n\n" - " --group_file {0}" + f" --group_file {output_group}" "\n\n" "to your `docker run` command" "\n" - .format(output_group) ) sys.exit(1) else: import CPAC.pipeline.cpac_group_runner as cgr - print("Starting group level analysis of data in {0} using " - "{1}".format(bids_dir, args.group_file)) + + print( + f"Starting group level analysis of data in {bids_dir} using " + f"{args.group_file}" + ) cgr.run(args.group_file) sys.exit(0) elif args.analysis_level in ["test_config", "participant"]: - # check to make sure that the input directory exists if ( - not args.data_config_file and - not bids_dir_is_s3 and - not os.path.exists(bids_dir) + not args.data_config_file + and not bids_dir_is_s3 + and not os.path.exists(bids_dir) ): - print(f"Error! 
Could not find {bids_dir}") sys.exit(1) # check to make sure that the output directory exists if not output_dir_is_s3 and not os.path.exists(output_dir): - try: os.makedirs(output_dir) except Exception: @@ -452,14 +549,11 @@ def run_main(): print() if args.bids_validator_config: print("Running BIDS validator") - run("bids-validator --config {config} {bids_dir}".format( - config=args.bids_validator_config, - bids_dir=bids_dir - )) + run(f"bids-validator --config {args.bids_validator_config} {bids_dir}") elif args.skip_bids_validator: - print('Skipping bids-validator...') + print("Skipping bids-validator...") elif bids_dir_is_s3: - print('Skipping bids-validator for S3 datasets...') + print("Skipping bids-validator for S3 datasets...") else: print("Running BIDS validator") run(f"bids-validator {bids_dir}") @@ -474,285 +568,317 @@ def run_main(): else: c = load_yaml_config(args.pipeline_file, args.aws_input_creds) - if 'pipeline_setup' not in c: - _url = (f'{DOCS_URL_PREFIX}/user/pipelines/' - '1.7-1.8-nesting-mappings') + if "pipeline_setup" not in c: + _url = f"{DOCS_URL_PREFIX}/user/pipelines/" "1.7-1.8-nesting-mappings" - logger.warning('\nC-PAC changed its pipeline configuration ' - 'format in v1.8.0.\nSee %s for details.\n', _url) + logger.warning( + "\nC-PAC changed its pipeline configuration " + "format in v1.8.0.\nSee %s for details.\n", + _url, + ) updated_config = os.path.join( - output_dir, - 'updated_config', - os.path.basename(args.pipeline_file) + output_dir, "updated_config", os.path.basename(args.pipeline_file) ) - os.makedirs( - os.path.join(output_dir, 'updated_config'), exist_ok=True) + os.makedirs(os.path.join(output_dir, "updated_config"), exist_ok=True) - open(updated_config, 'w').write(yaml.dump(c)) + open(updated_config, "w").write(yaml.dump(c)) upgrade_pipeline_to_1_8(updated_config) c = load_yaml_config(updated_config, args.aws_input_creds) overrides = {} - if hasattr(args, 'pipeline_override') and args.pipeline_override: - overrides = { - k: v for d in args.pipeline_override for k, v in d.items()} + if hasattr(args, "pipeline_override") and args.pipeline_override: + overrides = {k: v for d in args.pipeline_override for k, v in d.items()} c = update_nested_dict(c, overrides) if args.anat_only: - c = update_nested_dict(c, {'FROM': 'anat-only'}) + c = update_nested_dict(c, {"FROM": "anat-only"}) if args.user_defined: - c['pipeline_setup']['output_directory']['user_defined'] = args.user_defined + c["pipeline_setup"]["output_directory"]["user_defined"] = args.user_defined c = Configuration(c) # get the aws_input_credentials, if any are specified if args.aws_input_creds: - c['awsCredentialsFile'] = resolve_aws_credential( - args.aws_input_creds) + c["awsCredentialsFile"] = resolve_aws_credential(args.aws_input_creds) if args.aws_output_creds: - c['pipeline_setup']['Amazon-AWS'][ - 'aws_output_bucket_credentials' - ] = resolve_aws_credential( - args.aws_output_creds + c["pipeline_setup"]["Amazon-AWS"]["aws_output_bucket_credentials"] = ( + resolve_aws_credential(args.aws_output_creds) ) - c['pipeline_setup']['output_directory']['path'] = os.path.join( - output_dir, "output") + c["pipeline_setup"]["output_directory"]["path"] = os.path.join( + output_dir, "output" + ) if not output_dir_is_s3: - c['pipeline_setup']['log_directory']['path'] = os.path.join( - output_dir, "log") + c["pipeline_setup"]["log_directory"]["path"] = os.path.join( + output_dir, "log" + ) else: - c['pipeline_setup']['log_directory']['path'] = os.path.join( - DEFAULT_TMP_DIR, "log") + 
c["pipeline_setup"]["log_directory"]["path"] = os.path.join( + DEFAULT_TMP_DIR, "log" + ) if args.mem_gb: - c['pipeline_setup']['system_config'][ - 'maximum_memory_per_participant'] = float(args.mem_gb) + c["pipeline_setup"]["system_config"]["maximum_memory_per_participant"] = ( + float(args.mem_gb) + ) elif args.mem_mb: - c['pipeline_setup']['system_config'][ - 'maximum_memory_per_participant'] = float(args.mem_mb) / 1024.0 + c["pipeline_setup"]["system_config"]["maximum_memory_per_participant"] = ( + float(args.mem_mb) / 1024.0 + ) else: try: - c['pipeline_setup', 'system_config', - 'maximum_memory_per_participant'] = float( - c['pipeline_setup', 'system_config', - 'maximum_memory_per_participant']) + c[ + "pipeline_setup", "system_config", "maximum_memory_per_participant" + ] = float( + c[ + "pipeline_setup", + "system_config", + "maximum_memory_per_participant", + ] + ) except KeyError: - c['pipeline_setup', 'system_config', - 'maximum_memory_per_participant'] = 6.0 + c[ + "pipeline_setup", "system_config", "maximum_memory_per_participant" + ] = 6.0 # Preference: n_cpus if given, override if present, else from config if # present, else n_cpus=3 if int(args.n_cpus) == 0: try: - args.n_cpus = c['pipeline_setup', 'system_config', - 'max_cores_per_participant'] + args.n_cpus = c[ + "pipeline_setup", "system_config", "max_cores_per_participant" + ] except KeyError: args.n_cpus = 3 - c['pipeline_setup', 'system_config', - 'max_cores_per_participant'] = int(args.n_cpus) + c["pipeline_setup", "system_config", "max_cores_per_participant"] = int( + args.n_cpus + ) - c['pipeline_setup']['system_config']['num_participants_at_once'] = int( - c['pipeline_setup']['system_config'].get( - 'num_participants_at_once', 1)) + c["pipeline_setup"]["system_config"]["num_participants_at_once"] = int( + c["pipeline_setup"]["system_config"].get("num_participants_at_once", 1) + ) # Reduce cores per participant if cores times participants is more than # available CPUS. n_cpus is a hard upper limit. 
if ( - c['pipeline_setup']['system_config']['max_cores_per_participant'] * - c['pipeline_setup']['system_config']['num_participants_at_once'] + c["pipeline_setup"]["system_config"]["max_cores_per_participant"] + * c["pipeline_setup"]["system_config"]["num_participants_at_once"] ) > int(args.n_cpus): - c['pipeline_setup']['system_config'][ - 'max_cores_per_participant' - ] = int(args.n_cpus) // c['pipeline_setup']['system_config'][ - 'num_participants_at_once' - ] - if c['pipeline_setup']['system_config'][ - 'max_cores_per_participant' - ] == 0: - c['pipeline_setup']['system_config'][ - 'max_cores_per_participant'] = args.n_cpus - c['pipeline_setup']['system_config'][ - 'num_participants_at_once'] = 1 + c["pipeline_setup"]["system_config"]["max_cores_per_participant"] = ( + int(args.n_cpus) + // c["pipeline_setup"]["system_config"]["num_participants_at_once"] + ) + if c["pipeline_setup"]["system_config"]["max_cores_per_participant"] == 0: + c["pipeline_setup"]["system_config"]["max_cores_per_participant"] = ( + args.n_cpus + ) + c["pipeline_setup"]["system_config"]["num_participants_at_once"] = 1 if int(args.num_ants_threads) == 0: try: - args.num_ants_threads = c['pipeline_setup', 'system_config', - 'num_ants_threads'] + args.num_ants_threads = c[ + "pipeline_setup", "system_config", "num_ants_threads" + ] except KeyError: args.num_ants_threads = 3 - c['pipeline_setup', 'system_config', 'num_ants_threads'] = int( - args.num_ants_threads) + c["pipeline_setup", "system_config", "num_ants_threads"] = int( + args.num_ants_threads + ) - c['pipeline_setup']['system_config']['num_ants_threads'] = min( - c['pipeline_setup']['system_config']['max_cores_per_participant'], - int(c['pipeline_setup']['system_config']['num_ants_threads']) + c["pipeline_setup"]["system_config"]["num_ants_threads"] = min( + c["pipeline_setup"]["system_config"]["max_cores_per_participant"], + int(c["pipeline_setup"]["system_config"]["num_ants_threads"]), ) if args.random_seed: - c['pipeline_setup']['system_config']['random_seed'] = \ - args.random_seed + c["pipeline_setup"]["system_config"]["random_seed"] = args.random_seed - if c['pipeline_setup']['system_config']['random_seed'] is not None: - c['pipeline_setup']['system_config']['random_seed'] = \ - set_up_random_state(c['pipeline_setup']['system_config'][ - 'random_seed']) + if c["pipeline_setup"]["system_config"]["random_seed"] is not None: + c["pipeline_setup"]["system_config"]["random_seed"] = set_up_random_state( + c["pipeline_setup"]["system_config"]["random_seed"] + ) if args.runtime_usage is not None: - c['pipeline_setup']['system_config']['observed_usage'][ - 'callback_log'] = args.runtime_usage + c["pipeline_setup"]["system_config"]["observed_usage"]["callback_log"] = ( + args.runtime_usage + ) if args.runtime_buffer is not None: - c['pipeline_setup']['system_config']['observed_usage'][ - 'buffer'] = args.runtime_buffer + c["pipeline_setup"]["system_config"]["observed_usage"]["buffer"] = ( + args.runtime_buffer + ) if args.save_working_dir is not False: - c['pipeline_setup']['working_directory'][ - 'remove_working_dir'] = False + c["pipeline_setup"]["working_directory"]["remove_working_dir"] = False if isinstance(args.save_working_dir, str): - c['pipeline_setup']['working_directory']['path'] = \ - os.path.abspath(args.save_working_dir) + c["pipeline_setup"]["working_directory"]["path"] = os.path.abspath( + args.save_working_dir + ) elif not output_dir_is_s3: - c['pipeline_setup']['working_directory']['path'] = \ - os.path.join(output_dir, "working") + 
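
# Worked example of the throttling just above: with n_cpus=8 as the hard
# limit, 6 cores per participant and 2 participants at once would
# oversubscribe (6 * 2 > 8), so per-participant cores drop to 8 // 2 = 4;
# if the integer division bottoms out at 0, the run falls back to one
# participant at a time using all available cores.
n_cpus, cores_each, at_once = 8, 6, 2
if cores_each * at_once > n_cpus:
    cores_each = n_cpus // at_once
    if cores_each == 0:
        cores_each, at_once = n_cpus, 1
assert (cores_each, at_once) == (4, 2)
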
c["pipeline_setup"]["working_directory"]["path"] = os.path.join( + output_dir, "working" + ) else: - logger.warning('Cannot write working directory to S3 bucket. ' - 'Either change the output directory to something ' - 'local or turn off the --save_working_dir flag') + logger.warning( + "Cannot write working directory to S3 bucket. " + "Either change the output directory to something " + "local or turn off the --save_working_dir flag" + ) if args.fail_fast is not None: - c['pipeline_setup', 'system_config', - 'fail_fast'] = str_to_bool1_1(args.fail_fast) + c["pipeline_setup", "system_config", "fail_fast"] = str_to_bool1_1( + args.fail_fast + ) - if c['pipeline_setup']['output_directory']['quality_control'][ - 'generate_xcpqc_files']: - c['functional_preproc']['motion_estimates_and_correction'][ - 'motion_estimates']['calculate_motion_first'] = True - c['functional_preproc']['motion_estimates_and_correction'][ - 'motion_estimates']['calculate_motion_after'] = True + if c["pipeline_setup"]["output_directory"]["quality_control"][ + "generate_xcpqc_files" + ]: + c["functional_preproc"]["motion_estimates_and_correction"][ + "motion_estimates" + ]["calculate_motion_first"] = True + c["functional_preproc"]["motion_estimates_and_correction"][ + "motion_estimates" + ]["calculate_motion_after"] = True if args.participant_label: print( - "#### Running C-PAC for {0}" - .format(", ".join(args.participant_label)) + "#### Running C-PAC for {0}".format(", ".join(args.participant_label)) ) else: print("#### Running C-PAC") - print("Number of participants to run in parallel: {0}" - .format(c['pipeline_setup']['system_config'][ - 'num_participants_at_once'])) + print( + "Number of participants to run in parallel: {0}".format( + c["pipeline_setup"]["system_config"]["num_participants_at_once"] + ) + ) if args.freesurfer_dir: - c['pipeline_setup']['freesurfer_dir'] = args.freesurfer_dir - + c["pipeline_setup"]["freesurfer_dir"] = args.freesurfer_dir + if not args.data_config_file: - print("Input directory: {0}".format(bids_dir)) - - print("Output directory: {0}".format( - c['pipeline_setup']['output_directory']['path'])) - print("Working directory: {0}".format( - c['pipeline_setup']['working_directory']['path'])) - print("Log directory: {0}".format( - c['pipeline_setup']['log_directory']['path'])) - print("Remove working directory: {0}".format( - c['pipeline_setup']['working_directory']['remove_working_dir'])) - print("Available memory: {0} (GB)".format( - c['pipeline_setup']['system_config'][ - 'maximum_memory_per_participant'])) - print("Available threads: {0}".format( - c['pipeline_setup']['system_config']['max_cores_per_participant'])) - print("Number of threads for ANTs: {0}".format( - c['pipeline_setup']['system_config']['num_ants_threads'])) + print(f"Input directory: {bids_dir}") + + print( + "Output directory: {0}".format( + c["pipeline_setup"]["output_directory"]["path"] + ) + ) + print( + "Working directory: {0}".format( + c["pipeline_setup"]["working_directory"]["path"] + ) + ) + print("Log directory: {0}".format(c["pipeline_setup"]["log_directory"]["path"])) + print( + "Remove working directory: {0}".format( + c["pipeline_setup"]["working_directory"]["remove_working_dir"] + ) + ) + print( + "Available memory: {0} (GB)".format( + c["pipeline_setup"]["system_config"]["maximum_memory_per_participant"] + ) + ) + print( + "Available threads: {0}".format( + c["pipeline_setup"]["system_config"]["max_cores_per_participant"] + ) + ) + print( + "Number of threads for ANTs: {0}".format( + 
c["pipeline_setup"]["system_config"]["num_ants_threads"] + ) + ) # create a timestamp for writing config files # pylint: disable=invalid-name - st = datetime.datetime.now().strftime('%Y-%m-%dT%H-%M-%SZ') + st = datetime.datetime.now().strftime("%Y-%m-%dT%H-%M-%SZ") if args.participant_label: args.participant_label = cl_strip_brackets(args.participant_label) args.participant_label = [ - 'sub-' + pt if not pt.startswith('sub-') else pt + "sub-" + pt if not pt.startswith("sub-") else pt for pt in args.participant_label ] # otherwise we move on to conforming the data configuration if not args.data_config_file: - sub_list = create_cpac_data_config(bids_dir, - args.participant_label, - args.aws_input_creds, - args.skip_bids_validator, - only_one_anat=False) + sub_list = create_cpac_data_config( + bids_dir, + args.participant_label, + args.aws_input_creds, + args.skip_bids_validator, + only_one_anat=False, + ) else: - sub_list = load_cpac_data_config(args.data_config_file, - args.participant_label, - args.aws_input_creds) + sub_list = load_cpac_data_config( + args.data_config_file, args.participant_label, args.aws_input_creds + ) prefilter = list(sub_list) - sub_list = sub_list_filter_by_labels(sub_list, - {'T1w': args.T1w_label, - 'bold': args.bold_label}) + sub_list = sub_list_filter_by_labels( + sub_list, {"T1w": args.T1w_label, "bold": args.bold_label} + ) # C-PAC only handles single anatomical images (for now) # so we take just the first as a string if we have a list for i, sub in enumerate(sub_list): - if isinstance(sub.get('anat'), dict): - for anat_key in sub['anat']: - if( - isinstance(sub['anat'][anat_key], list) and - len(sub['anat'][anat_key]) + if isinstance(sub.get("anat"), dict): + for anat_key in sub["anat"]: + if isinstance(sub["anat"][anat_key], list) and len( + sub["anat"][anat_key] ): - sub_list[i]['anat'][ - anat_key] = sub['anat'][anat_key][0] - if isinstance(sub.get('anat'), list) and len(sub['anat']): - sub_list[i]['anat'] = sub['anat'][0] + sub_list[i]["anat"][anat_key] = sub["anat"][anat_key][0] + if isinstance(sub.get("anat"), list) and len(sub["anat"]): + sub_list[i]["anat"] = sub["anat"][0] if args.participant_ndx is not None: - participant_ndx = int(args.participant_ndx) if participant_ndx == -1: - args.participant_ndx = os.environ['AWS_BATCH_JOB_ARRAY_INDEX'] + args.participant_ndx = os.environ["AWS_BATCH_JOB_ARRAY_INDEX"] if 0 <= participant_ndx < len(sub_list): - print('Processing data for participant {0} ({1})'.format( - args.participant_ndx, - sub_list[participant_ndx]["subject_id"] - )) + print( + "Processing data for participant {0} ({1})".format( + args.participant_ndx, sub_list[participant_ndx]["subject_id"] + ) + ) sub_list = [sub_list[participant_ndx]] data_hash = hash_data_config(sub_list) - data_config_file = (f"cpac_data_config_{data_hash}_idx-" - f"{args.participant_ndx}_{st}.yml") + data_config_file = ( + f"cpac_data_config_{data_hash}_idx-" + f"{args.participant_ndx}_{st}.yml" + ) else: - print("Participant ndx {0} is out of bounds [0, {1})".format( - participant_ndx, - str(len(sub_list)) - )) + print( + f"Participant ndx {participant_ndx} is out of bounds [0, {len(sub_list)!s})" + ) sys.exit(1) else: data_hash = hash_data_config(sub_list) - data_config_file = (f"cpac_data_config_{data_hash}_{st}.yml") + data_config_file = f"cpac_data_config_{data_hash}_{st}.yml" sublogdirs = [set_subject(sub, c)[2] for sub in sub_list] # write out the data configuration file data_config_file = os.path.join(sublogdirs[0], data_config_file) - with 
open(data_config_file, 'w', encoding='utf-8') as _f: + with open(data_config_file, "w", encoding="utf-8") as _f: noalias_dumper = yaml.dumper.SafeDumper noalias_dumper.ignore_aliases = lambda self, data: True - yaml.dump(sub_list, _f, default_flow_style=False, - Dumper=noalias_dumper) + yaml.dump(sub_list, _f, default_flow_style=False, Dumper=noalias_dumper) # update and write out pipeline config file pipeline_config_file = os.path.join( - sublogdirs[0], f"cpac_pipeline_config_{data_hash}_{st}.yml") - with open(pipeline_config_file, 'w', encoding='utf-8') as _f: + sublogdirs[0], f"cpac_pipeline_config_{data_hash}_{st}.yml" + ) + with open(pipeline_config_file, "w", encoding="utf-8") as _f: _f.write(create_yaml_from_template(c)) - minimized_config = f'{pipeline_config_file[:-4]}_min.yml' - with open(minimized_config, 'w', encoding='utf-8') as _f: - _f.write(create_yaml_from_template(c, import_from='blank')) - for config_file in (data_config_file, pipeline_config_file, - minimized_config): + minimized_config = f"{pipeline_config_file[:-4]}_min.yml" + with open(minimized_config, "w", encoding="utf-8") as _f: + _f.write(create_yaml_from_template(c, import_from="blank")) + for config_file in (data_config_file, pipeline_config_file, minimized_config): os.chmod(config_file, 0o444) # Make config files readonly if len(sublogdirs) > 1: @@ -760,56 +886,68 @@ def run_main(): # file, an identical copy of the data and pipeline config # will be included in the log directory for each run for sublogdir in sublogdirs[1:]: - for config_file in (data_config_file, pipeline_config_file, - minimized_config): + for config_file in ( + data_config_file, + pipeline_config_file, + minimized_config, + ): try: - os.link(config_file, config_file.replace( - sublogdirs[0], sublogdir)) + os.link( + config_file, config_file.replace(sublogdirs[0], sublogdir) + ) except FileExistsError: pass if args.analysis_level in ["participant", "test_config"]: # build pipeline easy way - from CPAC.utils.monitoring import monitor_server import CPAC.pipeline.cpac_runner + from CPAC.utils.monitoring import monitor_server monitoring = None if args.monitoring: try: monitoring = monitor_server( - c['pipeline_setup']['pipeline_name'], - c['pipeline_setup']['log_directory']['path'] + c["pipeline_setup"]["pipeline_name"], + c["pipeline_setup"]["log_directory"]["path"], ) except: pass plugin_args = { - 'n_procs': int(c['pipeline_setup']['system_config'][ - 'max_cores_per_participant']), - 'memory_gb': int(c['pipeline_setup']['system_config'][ - 'maximum_memory_per_participant']), - 'raise_insufficient': c['pipeline_setup']['system_config'][ - 'raise_insufficient'], - 'status_callback': log_nodes_cb + "n_procs": int( + c["pipeline_setup"]["system_config"]["max_cores_per_participant"] + ), + "memory_gb": int( + c["pipeline_setup"]["system_config"][ + "maximum_memory_per_participant" + ] + ), + "raise_insufficient": c["pipeline_setup"]["system_config"][ + "raise_insufficient" + ], + "status_callback": log_nodes_cb, } - if c['pipeline_setup']['system_config']['observed_usage'][ - 'callback_log'] is not None: - plugin_args['runtime'] = { - 'usage': c['pipeline_setup']['system_config'][ - 'observed_usage']['callback_log'], - 'buffer': c['pipeline_setup']['system_config'][ - 'observed_usage']['buffer']} + if ( + c["pipeline_setup"]["system_config"]["observed_usage"]["callback_log"] + is not None + ): + plugin_args["runtime"] = { + "usage": c["pipeline_setup"]["system_config"]["observed_usage"][ + "callback_log" + ], + "buffer": 
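
# Why ignore_aliases is patched above: repeated references in sub_list would
# otherwise serialize as YAML anchors and aliases (&id001/*id001), which are
# easy to mangle when hand-editing a data config. An equivalent standalone
# version using a subclass instead of monkey-patching:
import yaml

class NoAliasDumper(yaml.SafeDumper):
    def ignore_aliases(self, data):
        return True  # always emit full entries, never anchors/aliases

shared = {"anat": "/data/sub-01/anat/sub-01_T1w.nii.gz"}
print(yaml.dump([shared, shared], Dumper=NoAliasDumper, default_flow_style=False))
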
c["pipeline_setup"]["system_config"]["observed_usage"][ + "buffer" + ], + } print("Starting participant level processing") exitcode = CPAC.pipeline.cpac_runner.run( data_config_file, pipeline_config_file, - plugin='MultiProc' if plugin_args[ - 'n_procs' - ] > 1 else 'Linear', + plugin="MultiProc" if plugin_args["n_procs"] > 1 else "Linear", plugin_args=plugin_args, tracking=not args.tracking_opt_out, - test_config=args.analysis_level == "test_config" + test_config=args.analysis_level == "test_config", ) if monitoring: @@ -818,24 +956,26 @@ def run_main(): if args.analysis_level == "test_config": if exitcode == 0: logger.info( - '\nPipeline and data configuration files should' - ' have been written to %s and %s respectively.\n', - pipeline_config_file, data_config_file) + "\nPipeline and data configuration files should" + " have been written to %s and %s respectively.\n", + pipeline_config_file, + data_config_file, + ) # wait to import `LOGTAIL` here so it has any runtime updates from CPAC.utils.monitoring import LOGTAIL - for warning in LOGTAIL['warnings']: - logger.warning('%s\n', warning.rstrip()) + + for warning in LOGTAIL["warnings"]: + logger.warning("%s\n", warning.rstrip()) sys.exit(exitcode) -if __name__ == '__main__': +if __name__ == "__main__": try: run_main() except Exception as exception: # if we hit an exception before the pipeline starts to build but # we're still able to create a logfile, log the error in the file - failed_to_start(sys.argv[2] if len(sys.argv) > 2 else os.getcwd(), - exception) + failed_to_start(sys.argv[2] if len(sys.argv) > 2 else os.getcwd(), exception) raise exception From eef356908b9bd1da4bd35dbfb76ef045d289c590 Mon Sep 17 00:00:00 2001 From: Jon Clucas Date: Mon, 4 Nov 2024 21:59:19 -0500 Subject: [PATCH 19/58] =?UTF-8?q?=F0=9F=9A=9A=20Move=20entrypoint=20script?= =?UTF-8?q?s=20into=20CPAC/=5Fentrypoints=20in=20preparation=20for=20backm?= =?UTF-8?q?erge?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Squashed commit of the following: commit 9ea9b068c316de0fd2f945b18faa766f95adfa19 Author: Jon Clucas Date: Mon Nov 4 21:57:33 2024 -0500 🚚 Move entrypoint scripts into CPAC/_entrypoints Ref 38a0d104521f1ef0de8c4cf4c03b8e445c99ff94 --- {dev/docker_data => CPAC/_entrypoints}/run-with-freesurfer.sh | 2 +- {dev/docker_data => CPAC/_entrypoints}/run.py | 0 2 files changed, 1 insertion(+), 1 deletion(-) rename {dev/docker_data => CPAC/_entrypoints}/run-with-freesurfer.sh (97%) rename {dev/docker_data => CPAC/_entrypoints}/run.py (100%) diff --git a/dev/docker_data/run-with-freesurfer.sh b/CPAC/_entrypoints/run-with-freesurfer.sh similarity index 97% rename from dev/docker_data/run-with-freesurfer.sh rename to CPAC/_entrypoints/run-with-freesurfer.sh index 440c6a47bb..b1551b4512 100755 --- a/dev/docker_data/run-with-freesurfer.sh +++ b/CPAC/_entrypoints/run-with-freesurfer.sh @@ -9,4 +9,4 @@ # You should have received a copy of the GNU Lesser General Public License along with C-PAC. If not, see . 
source $FREESURFER_HOME/SetUpFreeSurfer.sh -/code/run.py "$@" \ No newline at end of file +/code/run.py "$@" diff --git a/dev/docker_data/run.py b/CPAC/_entrypoints/run.py similarity index 100% rename from dev/docker_data/run.py rename to CPAC/_entrypoints/run.py From dc5663cba24aa344dc2745112bdf01516e332b71 Mon Sep 17 00:00:00 2001 From: Jon Clucas Date: Mon, 4 Nov 2024 23:07:19 -0500 Subject: [PATCH 20/58] :twisted_rightwards_arrows: Merge changes from branch 'feature/check_orientations' into 'many_pipelines' --- CHANGELOG.md | 47 +- CPAC/anat_preproc/anat_preproc.py | 93 ++-- CPAC/anat_preproc/lesion_preproc.py | 34 +- CPAC/func_preproc/func_preproc.py | 177 ++++++-- .../longitudinal_workflow.py | 40 +- CPAC/pipeline/engine.py | 428 +++++++++--------- CPAC/pipeline/schema.py | 56 ++- CPAC/pipeline/test/test_engine.py | 2 +- CPAC/registration/registration.py | 383 +++++++++------- CPAC/registration/tests/mocks.py | 4 +- CPAC/registration/tests/test_registration.py | 5 +- .../configs/pipeline_config_blank.yml | 3 + CPAC/resources/tests/test_templates.py | 13 +- CPAC/utils/datasource.py | 270 +++++------ CPAC/utils/test_mocks.py | 12 +- 15 files changed, 909 insertions(+), 658 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index e8a23221ea..be5ec4a432 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -18,14 +18,54 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ### Added +- `pyproject.toml` file with `[build-system]` defined. +- [![pre-commit.ci status](https://results.pre-commit.ci/badge/github/FCP-INDI/C-PAC/main.svg)](https://results.pre-commit.ci/latest/github/FCP-INDI/C-PAC/main) badge to [`README`](./README.md). +- `desired_orientation` key in participant-level pipeline config under `pipeline_setup`. +- Required positional parameter "wf" in input and output of `ingress_pipeconfig_paths` function, where a node to reorient templates is added to the `wf`. +- Required positional parameter "orientation" to `resolve_resolution`. +- Optional positional argument "cfg" to `create_lesion_preproc`. + +### Changed + +- Moved `pygraphviz` from requirements to `graphviz` optional dependencies group. +- Automatically tag untagged `subject_id` and `unique_id` as `!!str` when loading data config files. +- Made orientation configurable (was hard-coded as "RPI"). + +### Fixed + +- A bug in which AWS S3 encryption was looked for in Nipype config instead of pipeline config (only affected uploading logs). +- Restored `bids-validator` functionality. +- Fixed empty `shell` variable in cluster run scripts. +- A bug in which bandpass filters always assumed 1D regressor files have exactly 5 header rows. + +### Removed + +- Variant image recipes. + - `ABCD-HCP` + - `fMRIPrep-LTS` +- Typehinting support for Python < 3.10. + +## [1.8.7] - 2024-05-03 + +### Added + - `Robustfov` feature in `FSL-BET` to crop images ensuring removal of neck regions that may appear in the skull-stripped images. -- Ability to throttle nodes, estimating all available memory when threading +- Ability to throttle nodes, estimating all available memory when threading. +- Ability to configure FreeSurfer ingress from the command line. ### Changed +- The ABCD-pipeline based surface post-processing workflows have been modularized to be more robust, resolving a running issue with this part of the pipeline stalling or crashing in some runs. 
- Moved autoversioning from CI to pre-commit - Updated `FSL-BET` config to default `-mask-boolean` flag as on, and removed all removed `mask-boolean` keys from configs. -- Added `dvars` as optional output in `cpac_outputs` +- Added `dvars` as optional output in `cpac_outputs`. + +### Fixed + +- Fixed a bug where ingressing fmriprep outputs into C-PAC with a blank nuisance confounds field in the C-PAC pipeline configuration file would cause a crash. +- Fixed a bug where spatial smoothing and z-scoring of final outputs would sometimes fail to run when running a C-PAC pipeline that would ingress fmriprep outputs. +- Fixed a bug where ingress of distortion correction-related field map metadata would sometimes fail to recognize both echo times, when there were two present, leading to an error message claiming an echo time is missing. +- Changed an extraneous default pipeline configuration setting - `surface_connectivity` is now disabled in the default configuration as intended. ## [1.8.6] - 2024-01-15 @@ -285,7 +325,8 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 See [Version 1.8.1 Beta](https://fcp-indi.github.io/docs/user/release_notes/v1.8.1) for release notes for v1.8.1 and [Release Notes](https://fcp-indi.github.io/docs/user/release_notes) for all release notes back to v0.1.1. -[unreleased]: https://github.com/FCP-INDI/C-PAC/compare/v1.8.6...develop +[unreleased]: https://github.com/FCP-INDI/C-PAC/compare/v1.8.7...develop +[1.8.7]: https://github.com/FCP-INDI/C-PAC/releases/tag/v1.8.7 [1.8.6]: https://github.com/FCP-INDI/C-PAC/releases/tag/v1.8.6 [1.8.5]: https://github.com/FCP-INDI/C-PAC/releases/tag/v1.8.5 [1.8.4]: https://github.com/FCP-INDI/C-PAC/releases/tag/v1.8.4 diff --git a/CPAC/anat_preproc/anat_preproc.py b/CPAC/anat_preproc/anat_preproc.py index b37aebe003..a561f8e077 100644 --- a/CPAC/anat_preproc/anat_preproc.py +++ b/CPAC/anat_preproc/anat_preproc.py @@ -15,7 +15,6 @@ # You should have received a copy of the GNU Lesser General Public # License along with C-PAC. If not, see . -# from copy import deepcopy import os from nipype.interfaces import afni, ants, freesurfer, fsl @@ -36,6 +35,7 @@ ) from CPAC.pipeline import nipype_pipeline_engine as pe from CPAC.pipeline.nodeblock import nodeblock +from CPAC.utils.interfaces import Function from CPAC.utils.interfaces.fsl import Merge as fslMerge @@ -86,7 +86,7 @@ def acpc_alignment( elif config.anatomical_preproc["acpc_alignment"]["FOV_crop"] == "flirt": # robustfov doesn't work on some monkey data. prefer using flirt. 
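
# Re the CHANGELOG entry earlier in this patch about tagging untagged
# subject_id/unique_id as !!str: numeric-looking IDs otherwise load as
# integers and then break string operations such as .startswith("sub-").
# Standalone illustration:
import yaml

assert yaml.safe_load("subject_id: 25428") == {"subject_id": 25428}          # int!
assert yaml.safe_load("subject_id: !!str 25428") == {"subject_id": "25428"}  # str
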
# ${FSLDIR}/bin/flirt -in "${Input}" -applyxfm -ref "${Input}" -omat "$WD"/roi2full.mat -out "$WD"/robustroi.nii.gz - # adopted from DCAN NHP https://github.com/DCAN-Labs/dcan-macaque-pipeline/blob/master/PreFreeSurfer/scripts/ACPCAlignment.sh#L80-L81 + # adopted from DCAN NHP https://github.com/DCAN-Labs/dcan-macaque-pipeline/blob/8fe9f61/PreFreeSurfer/scripts/ACPCAlignment.sh#L80-L81 flirt_fov = pe.Node(interface=fsl.FLIRT(), name="anat_acpc_1_fov") flirt_fov.inputs.args = "-applyxfm" @@ -138,7 +138,7 @@ def acpc_alignment( aff_to_rig_imports = ["import os", "from numpy import *"] aff_to_rig = pe.Node( - util.Function( + Function( input_names=["in_xfm", "out_name"], output_names=["out_mat"], function=fsl_aff_to_rigid, @@ -198,7 +198,7 @@ def acpc_alignment( def T2wToT1wReg(wf_name="T2w_to_T1w_reg"): # Adapted from DCAN lab - # https://github.com/DCAN-Labs/dcan-macaque-pipeline/blob/master/PreFreeSurfer/scripts/T2wToT1wReg.sh + # https://github.com/DCAN-Labs/dcan-macaque-pipeline/blob/8fe9f61/PreFreeSurfer/scripts/T2wToT1wReg.sh preproc = pe.Workflow(name=wf_name) @@ -240,7 +240,7 @@ def T2wToT1wReg(wf_name="T2w_to_T1w_reg"): def BiasFieldCorrection_sqrtT1wXT1w(config=None, wf_name="biasfield_correction_t1t2"): # Adapted from DCAN lab - # https://github.com/DCAN-Labs/dcan-macaque-pipeline/blob/master/PreFreeSurfer/scripts/BiasFieldCorrection_sqrtT1wXT1w.sh + # https://github.com/DCAN-Labs/dcan-macaque-pipeline/blob/8fe9f61/PreFreeSurfer/scripts/BiasFieldCorrection_sqrtT1wXT1w.sh preproc = pe.Workflow(name=wf_name) @@ -319,7 +319,7 @@ def T1wmulT2w_brain_norm_s_string(sigma, in_file): return "-s %f -div %s" % (sigma, in_file) T1wmulT2w_brain_norm_s_string = pe.Node( - util.Function( + Function( input_names=["sigma", "in_file"], output_names=["out_str"], function=T1wmulT2w_brain_norm_s_string, @@ -378,7 +378,7 @@ def form_lower_string(mean, std): return "-thr %s -bin -ero -mul 255" % (lower) form_lower_string = pe.Node( - util.Function( + Function( input_names=["mean", "std"], output_names=["out_str"], function=form_lower_string, @@ -441,10 +441,10 @@ def form_lower_string(mean, std): # 6. Use bias field output to create corrected images def file_to_a_list(infile_1, infile_2): - return list([infile_1, infile_2]) + return [infile_1, infile_2] file_to_a_list = pe.Node( - util.Function( + Function( input_names=["infile_1", "infile_2"], output_names=["out_list"], function=file_to_a_list, @@ -544,7 +544,7 @@ def afni_brain_connector(wf, cfg, strat_pool, pipe_num, opt): ) skullstrip_args = pe.Node( - util.Function( + Function( input_names=[ "spat_norm", "spat_norm_dxyz", @@ -762,7 +762,7 @@ def fsl_brain_connector(wf, cfg, strat_pool, pipe_num, opt): anat_robustfov.inputs.output_type = "NIFTI_GZ" anat_pad_RobustFOV_cropped = pe.Node( - util.Function( + Function( input_names=["cropped_image_path", "target_image_path"], output_names=["padded_image_path"], function=pad, @@ -891,8 +891,9 @@ def niworkflows_ants_brain_connector(wf, cfg, strat_pool, pipe_num, opt): def unet_brain_connector(wf, cfg, strat_pool, pipe_num, opt): """ - UNet - options (following numbers are default): + UNet options. 
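
# The util.Function -> Function swaps throughout this file point at C-PAC's
# own Function interface (CPAC.utils.interfaces.Function). The general
# wrap-a-plain-function pattern, shown with vanilla Nipype so the sketch is
# self-contained:
import nipype.pipeline.engine as pe
from nipype.interfaces.utility import Function

def double(x):
    # Function-node bodies execute in isolation, so keep them self-contained
    # (any imports they need must happen inside the function itself).
    return 2 * x

double_node = pe.Node(
    Function(input_names=["x"], output_names=["doubled"], function=double),
    name="double",
)
double_node.inputs.x = 21
# double_node.run().outputs.doubled -> 42
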
+ + Following numbers are default: input_slice: 3 conv_block: 5 kernel_root: 16 @@ -901,7 +902,7 @@ def unet_brain_connector(wf, cfg, strat_pool, pipe_num, opt): from CPAC.unet.function import predict_volumes unet_mask = pe.Node( - util.Function( + Function( input_names=["model_path", "cimg_in"], output_names=["out_path"], function=predict_volumes, @@ -939,9 +940,9 @@ def unet_brain_connector(wf, cfg, strat_pool, pipe_num, opt): wf.connect(unet_mask, "out_path", unet_masked_brain, "operand_files") - # flirt -v -dof 6 -in brain.nii.gz -ref NMT_SS_0.5mm.nii.gz -o brain_rot2atl -omat brain_rot2atl.mat -interp sinc + # flirt -v -dof 6 -in brain.nii.gz -ref NMT_SS_0.5mm.nii.gz -o brain_rot2atl -omat brain_rot2atl.mat -interp sinc native_brain_to_template_brain = pe.Node( - interface=fsl.FLIRT(), name=f"native_brain_to_template_" f"brain_{pipe_num}" + interface=fsl.FLIRT(), name=f"native_brain_to_template_brain_{pipe_num}" ) native_brain_to_template_brain.inputs.dof = 6 native_brain_to_template_brain.inputs.interp = "sinc" @@ -950,9 +951,9 @@ def unet_brain_connector(wf, cfg, strat_pool, pipe_num, opt): node, out = strat_pool.get_data("T1w-brain-template") wf.connect(node, out, native_brain_to_template_brain, "reference") - # flirt -in head.nii.gz -ref NMT_0.5mm.nii.gz -o head_rot2atl -applyxfm -init brain_rot2atl.mat + # flirt -in head.nii.gz -ref NMT_0.5mm.nii.gz -o head_rot2atl -applyxfm -init brain_rot2atl.mat native_head_to_template_head = pe.Node( - interface=fsl.FLIRT(), name=f"native_head_to_template_" f"head_{pipe_num}" + interface=fsl.FLIRT(), name=f"native_head_to_template_head_{pipe_num}" ) native_head_to_template_head.inputs.apply_xfm = True @@ -974,7 +975,7 @@ def unet_brain_connector(wf, cfg, strat_pool, pipe_num, opt): node, out = strat_pool.get_data("T1w-template") wf.connect(node, out, native_head_to_template_head, "reference") - # fslmaths NMT_SS_0.5mm.nii.gz -bin templateMask.nii.gz + # fslmaths NMT_SS_0.5mm.nii.gz -bin templateMask.nii.gz template_brain_mask = pe.Node( interface=fsl.maths.MathsCommand(), name=f"template_brain_mask_{pipe_num}" ) @@ -985,7 +986,7 @@ def unet_brain_connector(wf, cfg, strat_pool, pipe_num, opt): # ANTS 3 -m CC[head_rot2atl.nii.gz,NMT_0.5mm.nii.gz,1,5] -t SyN[0.25] -r Gauss[3,0] -o atl2T1rot -i 60x50x20 --use-Histogram-Matching --number-of-affine-iterations 10000x10000x10000x10000x10000 --MI-option 32x16000 ants_template_head_to_template = pe.Node( - interface=ants.Registration(), name=f"template_head_to_" f"template_{pipe_num}" + interface=ants.Registration(), name=f"template_head_to_template_{pipe_num}" ) ants_template_head_to_template.inputs.metric = ["CC"] ants_template_head_to_template.inputs.metric_weight = [1, 5] @@ -1006,7 +1007,7 @@ def unet_brain_connector(wf, cfg, strat_pool, pipe_num, opt): node, out = strat_pool.get_data("T1w-brain-template") wf.connect(node, out, ants_template_head_to_template, "moving_image") - # antsApplyTransforms -d 3 -i templateMask.nii.gz -t atl2T1rotWarp.nii.gz atl2T1rotAffine.txt -r brain_rot2atl.nii.gz -o brain_rot2atl_mask.nii.gz + # antsApplyTransforms -d 3 -i templateMask.nii.gz -t atl2T1rotWarp.nii.gz atl2T1rotAffine.txt -r brain_rot2atl.nii.gz -o brain_rot2atl_mask.nii.gz template_head_transform_to_template = pe.Node( interface=ants.ApplyTransforms(), name=f"template_head_transform_to_template_{pipe_num}", @@ -1032,14 +1033,14 @@ def unet_brain_connector(wf, cfg, strat_pool, pipe_num, opt): "transforms", ) - # convert_xfm -omat brain_rot2native.mat -inverse brain_rot2atl.mat + # convert_xfm -omat 
brain_rot2native.mat -inverse brain_rot2atl.mat invt = pe.Node(interface=fsl.ConvertXFM(), name="convert_xfm") invt.inputs.invert_xfm = True wf.connect(native_brain_to_template_brain, "out_matrix_file", invt, "in_file") # flirt -in brain_rot2atl_mask.nii.gz -ref brain.nii.gz -o brain_mask.nii.gz -applyxfm -init brain_rot2native.mat template_brain_to_native_brain = pe.Node( - interface=fsl.FLIRT(), name=f"template_brain_to_native_" f"brain_{pipe_num}" + interface=fsl.FLIRT(), name=f"template_brain_to_native_brain_{pipe_num}" ) template_brain_to_native_brain.inputs.apply_xfm = True wf.connect( @@ -1054,9 +1055,7 @@ def unet_brain_connector(wf, cfg, strat_pool, pipe_num, opt): wf.connect(invt, "out_file", template_brain_to_native_brain, "in_matrix_file") # fslmaths brain_mask.nii.gz -thr .5 -bin brain_mask_thr.nii.gz - refined_mask = pe.Node( - interface=fsl.Threshold(), name=f"refined_mask" f"_{pipe_num}" - ) + refined_mask = pe.Node(interface=fsl.Threshold(), name=f"refined_mask_{pipe_num}") refined_mask.inputs.thresh = 0.5 refined_mask.inputs.args = "-bin" wf.connect(template_brain_to_native_brain, "out_file", refined_mask, "in_file") @@ -1084,7 +1083,7 @@ def freesurfer_brain_connector(wf, cfg, strat_pool, pipe_num, opt): # convert brain mask file from .mgz to .nii.gz fs_brain_mask_to_nifti = pe.Node( - util.Function( + Function( input_names=["in_file"], output_names=["out_file"], function=mri_convert ), name=f"fs_brainmask_to_nifti_{pipe_num}", @@ -1115,12 +1114,12 @@ def freesurfer_brain_connector(wf, cfg, strat_pool, pipe_num, opt): def freesurfer_abcd_brain_connector(wf, cfg, strat_pool, pipe_num, opt): """ - ABCD harmonization - anatomical brain mask generation + ABCD harmonization - anatomical brain mask generation. - Ref: https://github.com/DCAN-Labs/DCAN-HCP/blob/master/PostFreeSurfer/PostFreeSurferPipeline.sh#L151-L156 + Ref: https://github.com/DCAN-Labs/DCAN-HCP/blob/7927754/PostFreeSurfer/PostFreeSurferPipeline.sh#L151-L156 """ wmparc_to_nifti = pe.Node( - util.Function( + Function( input_names=["in_file", "reslice_like", "args"], output_names=["out_file"], function=mri_convert, @@ -1131,7 +1130,7 @@ def freesurfer_abcd_brain_connector(wf, cfg, strat_pool, pipe_num, opt): # Register wmparc file if ingressing FreeSurfer data if strat_pool.check_rpool("pipeline-fs_xfm"): wmparc_to_native = pe.Node( - util.Function( + Function( input_names=["source_file", "target_file", "xfm", "out_file"], output_names=["transformed_file"], function=normalize_wmparc, @@ -1169,7 +1168,7 @@ def freesurfer_abcd_brain_connector(wf, cfg, strat_pool, pipe_num, opt): wf.connect(wmparc_to_nifti, "out_file", binary_mask, "in_file") wb_command_fill_holes = pe.Node( - util.Function( + Function( input_names=["in_file"], output_names=["out_file"], function=wb_command ), name=f"wb_command_fill_holes_{pipe_num}", @@ -1207,7 +1206,7 @@ def freesurfer_fsl_brain_connector(wf, cfg, strat_pool, pipe_num, opt): # mri_convert -it mgz ${SUBJECTS_DIR}/${subject}/mri/brainmask.mgz -ot nii brainmask.nii.gz convert_fs_brainmask_to_nifti = pe.Node( - util.Function( + Function( input_names=["in_file"], output_names=["out_file"], function=mri_convert ), name=f"convert_fs_brainmask_to_nifti_{node_id}", @@ -1218,7 +1217,7 @@ def freesurfer_fsl_brain_connector(wf, cfg, strat_pool, pipe_num, opt): # mri_convert -it mgz ${SUBJECTS_DIR}/${subject}/mri/T1.mgz -ot nii T1.nii.gz convert_fs_T1_to_nifti = pe.Node( - util.Function( + Function( input_names=["in_file"], output_names=["out_file"], function=mri_convert ), 
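
# The convert_xfm step at the top of this hunk inverts the native-to-template
# affine so the template-space mask can be brought back to native space. The
# same inversion as a standalone interface call (placeholder filenames; FSL
# must be installed for .run() to work):
from nipype.interfaces import fsl

invt_sketch = fsl.ConvertXFM(
    in_file="brain_rot2atl.mat",     # forward affine from FLIRT
    invert_xfm=True,                 # emit its inverse
    out_file="brain_rot2native.mat",
)
# invt_sketch.run()
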
name=f"convert_fs_T1_to_nifti_{node_id}", @@ -1234,7 +1233,7 @@ def freesurfer_fsl_brain_connector(wf, cfg, strat_pool, pipe_num, opt): mem_gb=0, mem_x=(0.0115, "in_file", "t"), ) - reorient_fs_brainmask.inputs.orientation = "RPI" + reorient_fs_brainmask.inputs.orientation = cfg.pipeline_setup["desired_orientation"] reorient_fs_brainmask.inputs.outputtype = "NIFTI_GZ" wf.connect( @@ -1256,7 +1255,7 @@ def freesurfer_fsl_brain_connector(wf, cfg, strat_pool, pipe_num, opt): mem_gb=0, mem_x=(0.0115, "in_file", "t"), ) - reorient_fs_T1.inputs.orientation = "RPI" + reorient_fs_T1.inputs.orientation = cfg.pipeline_setup["desired_orientation"] reorient_fs_T1.inputs.outputtype = "NIFTI_GZ" wf.connect(convert_fs_T1_to_nifti, "out_file", reorient_fs_T1, "in_file") @@ -1385,7 +1384,7 @@ def freesurfer_fsl_brain_connector(wf, cfg, strat_pool, pipe_num, opt): def mask_T2(wf_name="mask_T2"): # create T2 mask based on T1 mask - # reference https://github.com/DCAN-Labs/dcan-macaque-pipeline/blob/master/PreliminaryMasking/macaque_masking.py + # reference https://github.com/DCAN-Labs/dcan-macaque-pipeline/blob/83512b0/PreliminaryMasking/macaque_masking.py preproc = pe.Workflow(name=wf_name) @@ -1461,7 +1460,7 @@ def anatomical_init(wf, cfg, strat_pool, pipe_num, opt=None): mem_gb=0, mem_x=(0.0115, "in_file", "t"), ) - anat_reorient.inputs.orientation = "RPI" + anat_reorient.inputs.orientation = cfg.pipeline_setup["desired_orientation"] anat_reorient.inputs.outputtype = "NIFTI_GZ" wf.connect(anat_deoblique, "out_file", anat_reorient, "in_file") @@ -2269,7 +2268,7 @@ def anatomical_init_T2(wf, cfg, strat_pool, pipe_num, opt=None): mem_gb=0, mem_x=(0.0115, "in_file", "t"), ) - T2_reorient.inputs.orientation = "RPI" + T2_reorient.inputs.orientation = cfg.pipeline_setup["desired_orientation"] T2_reorient.inputs.outputtype = "NIFTI_GZ" wf.connect(T2_deoblique, "out_file", T2_reorient, "in_file") @@ -2829,7 +2828,7 @@ def freesurfer_abcd_preproc(wf, cfg, strat_pool, pipe_num, opt=None): ) ### ABCD Harmonization ### - # Ref: https://github.com/DCAN-Labs/DCAN-HCP/blob/master/FreeSurfer/FreeSurferPipeline.sh#L140-L144 + # Ref: https://github.com/DCAN-Labs/DCAN-HCP/blob/9a02c47/FreeSurfer/FreeSurferPipeline.sh#L140-L144 # flirt -interp spline -in "$T1wImage" -ref "$T1wImage" -applyisoxfm 1 -out "$T1wImageFile"_1mm.nii.gz resample_head_1mm = pe.Node( @@ -2889,7 +2888,7 @@ def freesurfer_abcd_preproc(wf, cfg, strat_pool, pipe_num, opt=None): # fslmaths "$T1wImageFile"_1mm.nii.gz -div $Mean -mul 150 -abs "$T1wImageFile"_1mm.nii.gz normalize_head = pe.Node( - util.Function( + Function( input_names=["in_file", "number", "out_file_suffix"], output_names=["out_file"], function=fslmaths_command, @@ -2980,7 +2979,7 @@ def freesurfer_reconall(wf, cfg, strat_pool, pipe_num, opt=None): def fnirt_based_brain_extraction(config=None, wf_name="fnirt_based_brain_extraction"): ### ABCD Harmonization - FNIRT-based brain extraction ### - # Ref: https://github.com/DCAN-Labs/DCAN-HCP/blob/master/PreFreeSurfer/scripts/BrainExtraction_FNIRTbased.sh + # Ref: https://github.com/DCAN-Labs/DCAN-HCP/blob/4d9996b/PreFreeSurfer/scripts/BrainExtraction_FNIRTbased.sh preproc = pe.Workflow(name=wf_name) @@ -3101,7 +3100,7 @@ def fnirt_based_brain_extraction(config=None, wf_name="fnirt_based_brain_extract def fast_bias_field_correction(config=None, wf_name="fast_bias_field_correction"): ### ABCD Harmonization - FAST bias field correction ### - # Ref: https://github.com/DCAN-Labs/DCAN-HCP/blob/master/PreFreeSurfer/PreFreeSurferPipeline.sh#L688-L694 + # 
Ref: https://github.com/DCAN-Labs/DCAN-HCP/blob/9291324/PreFreeSurfer/PreFreeSurferPipeline.sh#L688-L694 preproc = pe.Workflow(name=wf_name) @@ -3186,7 +3185,7 @@ def fast_bias_field_correction(config=None, wf_name="fast_bias_field_correction" ) def correct_restore_brain_intensity_abcd(wf, cfg, strat_pool, pipe_num, opt=None): ### ABCD Harmonization - Myelin Map ### - # Ref: https://github.com/DCAN-Labs/DCAN-HCP/blob/master/PreFreeSurfer/PreFreeSurferPipeline.sh#L655-L656 + # Ref: https://github.com/DCAN-Labs/DCAN-HCP/blob/9291324/PreFreeSurfer/PreFreeSurferPipeline.sh#L655-L656 # fslmerge -t ${T1wFolder}/xfms/${T1wImage}_dc ${T1wFolder}/${T1wImage}_acpc ${T1wFolder}/${T1wImage}_acpc ${T1wFolder}/${T1wImage}_acpc merge_t1_acpc_to_list = pe.Node( util.Merge(3), name=f"merge_t1_acpc_to_list_{pipe_num}" @@ -3212,7 +3211,7 @@ def correct_restore_brain_intensity_abcd(wf, cfg, strat_pool, pipe_num, opt=None wf.connect(merge_t1_acpc, "merged_file", multiply_t1_acpc_by_zero, "in_file") - # Ref: https://github.com/DCAN-Labs/DCAN-HCP/blob/master/PostFreeSurfer/PostFreeSurferPipeline.sh#L157 + # Ref: https://github.com/DCAN-Labs/DCAN-HCP/blob/7927754/PostFreeSurfer/PostFreeSurferPipeline.sh#L157 # convertwarp --relout --rel --ref="$T1wFolder"/"$T1wImageBrainMask" --premat="$T1wFolder"/xfms/"$InitialT1wTransform" \ # --warp1="$T1wFolder"/xfms/"$dcT1wTransform" --out="$T1wFolder"/xfms/"$OutputOrigT1wToT1w" convertwarp_orig_t1_to_t1 = pe.Node( @@ -3229,7 +3228,7 @@ def correct_restore_brain_intensity_abcd(wf, cfg, strat_pool, pipe_num, opt=None wf.connect(node, out, convertwarp_orig_t1_to_t1, "premat") wf.connect(multiply_t1_acpc_by_zero, "out_file", convertwarp_orig_t1_to_t1, "warp1") - # Ref: https://github.com/DCAN-Labs/DCAN-HCP/blob/master/PostFreeSurfer/scripts/CreateMyelinMaps.sh#L72-L73 + # Ref: https://github.com/DCAN-Labs/DCAN-HCP/blob/a8d495a/PostFreeSurfer/scripts/CreateMyelinMaps.sh#L72-L73 # applywarp --rel --interp=spline -i "$BiasField" -r "$T1wImageBrain" -w "$AtlasTransform" -o "$BiasFieldOutput" applywarp_biasfield = pe.Node( interface=fsl.ApplyWarp(), name=f"applywarp_biasfield_{pipe_num}" @@ -3255,7 +3254,7 @@ def correct_restore_brain_intensity_abcd(wf, cfg, strat_pool, pipe_num, opt=None threshold_biasfield.inputs.op_string = "-thr 0.1" wf.connect(applywarp_biasfield, "out_file", threshold_biasfield, "in_file") - # Ref: https://github.com/DCAN-Labs/DCAN-HCP/blob/master/PostFreeSurfer/scripts/CreateMyelinMaps.sh#L67-L70 + # Ref: https://github.com/DCAN-Labs/DCAN-HCP/blob/a8d495a/PostFreeSurfer/scripts/CreateMyelinMaps.sh#L67-L70 # applywarp --rel --interp=spline -i "$OrginalT1wImage" -r "$T1wImageBrain" -w "$OutputOrigT1wToT1w" -o "$OutputT1wImage" applywarp_t1 = pe.Node(interface=fsl.ApplyWarp(), name=f"applywarp_t1_{pipe_num}") diff --git a/CPAC/anat_preproc/lesion_preproc.py b/CPAC/anat_preproc/lesion_preproc.py index 2ef58c3d2a..21628c97f0 100644 --- a/CPAC/anat_preproc/lesion_preproc.py +++ b/CPAC/anat_preproc/lesion_preproc.py @@ -1,13 +1,30 @@ # -*- coding: utf-8 -*- +# Copyright (C) 2019-2023 C-PAC Developers +# This file is part of C-PAC. + +# C-PAC is free software: you can redistribute it and/or modify it under +# the terms of the GNU Lesser General Public License as published by the +# Free Software Foundation, either version 3 of the License, or (at your +# option) any later version. + +# C-PAC is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +# FITNESS FOR A PARTICULAR PURPOSE. 
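
# On the convertwarp step above: it chains the initial ACPC affine (--premat)
# with the zeroed distortion-correction warp (--warp1) into a single relative
# warpfield. The same call as a standalone interface (placeholder filenames;
# FSL must be installed for .run() to work):
from nipype.interfaces import fsl

cw_sketch = fsl.ConvertWarp(
    reference="T1w_brain_mask.nii.gz",
    premat="initial_t1w_transform.mat",
    warp1="dc_t1w_transform.nii.gz",
    relwarp=True,       # --rel
    out_relwarp=True,   # --relout
    out_file="orig_t1w_to_t1w.nii.gz",
)
# cw_sketch.run()
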
See the GNU Lesser General Public
+# License for more details.
+
+# You should have received a copy of the GNU Lesser General Public
+# License along with C-PAC. If not, see .
 from nipype.interfaces import afni
 import nipype.interfaces.utility as util
 
 from CPAC.pipeline import nipype_pipeline_engine as pe
+from CPAC.utils.interfaces import Function
 
 
 def inverse_lesion(lesion_path):
-    """
+    """Replace non-zeroes with zeroes and zeroes with ones.
+
     Check if the image contains more zeros than non-zeros, if so,
     replaces non-zeros by zeros and zeros by ones.
 
@@ -38,13 +55,12 @@ def inverse_lesion(lesion_path):
         nii = nu.inverse_nifti_values(image=lesion_path)
         nib.save(nii, lesion_out)
         return lesion_out
-    else:
-        return lesion_out
+    return lesion_out
 
 
-def create_lesion_preproc(wf_name="lesion_preproc"):
-    """
-    The main purpose of this workflow is to process lesions masks.
+def create_lesion_preproc(cfg=None, wf_name="lesion_preproc"):
+    """Process lesion masks.
+
     Lesion mask file is deobliqued and reoriented in the same way as the T1 in
     the anat_preproc function.
 
@@ -95,7 +111,7 @@ def create_lesion_preproc(wf_name="lesion_preproc"):
     lesion_deoblique.inputs.deoblique = True
 
     lesion_inverted = pe.Node(
-        interface=util.Function(
+        interface=Function(
             input_names=["lesion_path"],
             output_names=["lesion_out"],
             function=inverse_lesion,
@@ -117,7 +133,9 @@ def create_lesion_preproc(wf_name="lesion_preproc"):
         mem_x=(0.0115, "in_file", "t"),
     )
 
-    lesion_reorient.inputs.orientation = "RPI"
+    lesion_reorient.inputs.orientation = (
+        cfg.pipeline_setup["desired_orientation"] if cfg else "RPI"
+    )
     lesion_reorient.inputs.outputtype = "NIFTI_GZ"
 
     preproc.connect(lesion_deoblique, "out_file", lesion_reorient, "in_file")
diff --git a/CPAC/func_preproc/func_preproc.py b/CPAC/func_preproc/func_preproc.py
index fef6a01024..2340333ca6 100644
--- a/CPAC/func_preproc/func_preproc.py
+++ b/CPAC/func_preproc/func_preproc.py
@@ -14,18 +14,16 @@
 # You should have received a copy of the GNU Lesser General Public
 # License along with C-PAC. If not, see .
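
# Usage note for the optional cfg parameter introduced above: with no cfg the
# workflow keeps its historical "RPI" default, so existing callers behave
# exactly as before (sketch; assumes C-PAC is importable and `cfg` is a
# loaded Configuration):
from CPAC.anat_preproc.lesion_preproc import create_lesion_preproc

wf = create_lesion_preproc()           # reorients lesion masks to "RPI"
# wf = create_lesion_preproc(cfg=cfg)  # follows cfg.pipeline_setup["desired_orientation"]
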
-"""Functional preprocessing""" +"""Functional preprocessing.""" # pylint: disable=ungrouped-imports,wrong-import-order,wrong-import-position -from nipype import logging from nipype.interfaces import afni, ants, fsl, utility as util - -logger = logging.getLogger("nipype.workflow") from nipype.interfaces.afni import preprocess, utils as afni_utils from CPAC.func_preproc.utils import nullify from CPAC.pipeline import nipype_pipeline_engine as pe from CPAC.pipeline.nodeblock import nodeblock +from CPAC.utils.interfaces import Function from CPAC.utils.interfaces.ants import ( AI, # niworkflows PrintHeader, @@ -35,6 +33,7 @@ def collect_arguments(*args): + """Collect arguments.""" command_args = [] if args[0]: command_args += [args[1]] @@ -43,6 +42,7 @@ def collect_arguments(*args): def anat_refined_mask(init_bold_mask=True, wf_name="init_bold_mask"): + """Generate an anatomically refined mask.""" wf = pe.Workflow(name=wf_name) input_node = pe.Node( @@ -77,7 +77,7 @@ def anat_refined_mask(init_bold_mask=True, wf_name="init_bold_mask"): wf.connect(func_single_volume, "out_file", func_tmp_brain, "in_file_a") # 2.1 get a tmp func brain mask - if init_bold_mask == True: + if init_bold_mask: # 2.1.1 N4BiasFieldCorrection single volume of raw_func func_single_volume_n4_corrected = pe.Node( interface=ants.N4BiasFieldCorrection( @@ -165,7 +165,7 @@ def anat_refined_mask(init_bold_mask=True, wf_name="init_bold_mask"): wf.connect(reg_anat_mask_to_func, "out_file", func_mask, "operand_files") - if init_bold_mask == True: + if init_bold_mask: wf.connect(func_tmp_brain_mask_dil, "out_file", func_mask, "in_file") else: wf.connect(input_node, "init_func_brain_mask", func_mask, "in_file") @@ -176,7 +176,10 @@ def anat_refined_mask(init_bold_mask=True, wf_name="init_bold_mask"): def anat_based_mask(wf_name="bold_mask"): - """Reference `DCAN lab BOLD mask `_""" + """Generate a functional mask from anatomical data. + + Reference `DCAN lab BOLD mask `_. + """ wf = pe.Workflow(name=wf_name) input_node = pe.Node( @@ -341,7 +344,7 @@ def create_wf_edit_func(wf_name="edit_func"): # allocate a node to check that the requested edits are # reasonable given the data func_get_idx = pe.Node( - util.Function( + Function( input_names=["in_files", "stop_idx", "start_idx"], output_names=["stopidx", "startidx"], function=get_idx, @@ -379,6 +382,7 @@ def create_wf_edit_func(wf_name="edit_func"): def slice_timing_wf(name="slice_timing", tpattern=None, tzero=None): + """Calculate corrected slice-timing.""" # allocate a workflow object wf = pe.Workflow(name=name) @@ -443,11 +447,10 @@ def slice_timing_wf(name="slice_timing", tpattern=None, tzero=None): def get_idx(in_files, stop_idx=None, start_idx=None): - """ - Method to get the first and the last slice for - the functional run. It verifies the user specified - first and last slice. If the values are not valid, it - calculates and returns the very first and the last slice + """Get the first and the last slice for the functional run. + + Verify the user specified first and last slice. If the values are not valid, + calculate and return the very first and the last slice. 
Parameters
     ----------
 
@@ -480,12 +483,12 @@ def get_idx(in_files, stop_idx=None, start_idx=None):
     shape = hdr.get_data_shape()
 
     # Check to make sure the input file is 4-dimensional
-    if len(shape) != 4:
+    if len(shape) != 4:  # noqa: PLR2004
         raise TypeError("Input nifti file: %s is not a 4D file" % in_files)
 
     # Grab the number of volumes
     nvols = int(hdr.get_data_shape()[3])
 
-    if (start_idx == None) or (int(start_idx) < 0) or (int(start_idx) > (nvols - 1)):
+    if (start_idx is None) or (int(start_idx) < 0) or (int(start_idx) > (nvols - 1)):
         startidx = 0
     else:
         startidx = int(start_idx)
@@ -506,6 +509,7 @@ def get_idx(in_files, stop_idx=None, start_idx=None):
     outputs=["desc-preproc_bold", "desc-reorient_bold"],
 )
 def func_reorient(wf, cfg, strat_pool, pipe_num, opt=None):
+    """Reorient functional timeseries."""
     func_deoblique = pe.Node(
         interface=afni_utils.Refit(),
         name=f"func_deoblique_{pipe_num}",
@@ -524,7 +528,7 @@ def func_reorient(wf, cfg, strat_pool, pipe_num, opt=None):
         mem_x=(0.0115, "in_file", "t"),
     )
 
-    func_reorient.inputs.orientation = "RPI"
+    func_reorient.inputs.orientation = cfg.pipeline_setup["desired_orientation"]
     func_reorient.inputs.outputtype = "NIFTI_GZ"
 
     wf.connect(func_deoblique, "out_file", func_reorient, "in_file")
@@ -545,6 +549,7 @@ def func_reorient(wf, cfg, strat_pool, pipe_num, opt=None):
     outputs=["desc-preproc_bold"],
 )
 def func_scaling(wf, cfg, strat_pool, pipe_num, opt=None):
+    """Scale functional timeseries."""
     scale_func_wf = create_scale_func_wf(
         scaling_factor=cfg.scaling_factor, wf_name=f"scale_func_{pipe_num}"
     )
@@ -568,6 +573,7 @@ def func_scaling(wf, cfg, strat_pool, pipe_num, opt=None):
     },
 )
 def func_truncate(wf, cfg, strat_pool, pipe_num, opt=None):
+    """Truncate functional timeseries."""
     # if cfg.functional_preproc['truncation']['start_tr'] == 0 and \
     #     cfg.functional_preproc['truncation']['stop_tr'] == None:
     #     data, key = strat_pool.get_data("desc-preproc_bold",
@@ -603,6 +609,7 @@ def func_truncate(wf, cfg, strat_pool, pipe_num, opt=None):
     },
 )
 def func_despike(wf, cfg, strat_pool, pipe_num, opt=None):
+    """Generate de-spiked functional timeseries in native space with AFNI."""
     despike = pe.Node(
         interface=preprocess.Despike(),
         name=f"func_despiked_{pipe_num}",
@@ -645,6 +652,7 @@ def func_despike(wf, cfg, strat_pool, pipe_num, opt=None):
     },
 )
 def func_despike_template(wf, cfg, strat_pool, pipe_num, opt=None):
+    """Generate de-spiked functional timeseries in template space with AFNI."""
     despike = pe.Node(
         interface=preprocess.Despike(),
         name=f"func_despiked_template_{pipe_num}",
@@ -699,8 +707,9 @@ def func_despike_template(wf, cfg, strat_pool, pipe_num, opt=None):
     },
 )
 def func_slice_time(wf, cfg, strat_pool, pipe_num, opt=None):
+    """Generate slice-time corrected timeseries."""
     slice_time = slice_timing_wf(
-        name="func_slice_timing_correction_" f"{pipe_num}",
+        name=f"func_slice_timing_correction_{pipe_num}",
         tpattern=cfg.functional_preproc["slice_timing_correction"]["tpattern"],
         tzero=cfg.functional_preproc["slice_timing_correction"]["tzero"],
     )
@@ -738,6 +747,7 @@ def func_slice_time(wf, cfg, strat_pool, pipe_num, opt=None):
     },
 )
 def bold_mask_afni(wf, cfg, strat_pool, pipe_num, opt=None):
+    """Generate a functional mask with AFNI."""
     func_get_brain_mask = pe.Node(
         interface=preprocess.Automask(), name=f"func_get_brain_mask_AFNI_{pipe_num}"
     )
@@ -763,6 +773,7 @@ def bold_mask_afni(wf, cfg, strat_pool, pipe_num, opt=None):
     outputs=["space-bold_desc-brain_mask"],
 )
 def bold_mask_fsl(wf, cfg, strat_pool, pipe_num, opt=None):
+    """Generate functional 
mask with FSL.""" inputnode_bet = pe.Node( util.IdentityInterface( fields=[ @@ -867,7 +878,7 @@ def form_thr_string(thr): return "-thr %s" % (threshold_z) form_thr_string = pe.Node( - util.Function( + Function( input_names=["thr"], output_names=["out_str"], function=form_thr_string, @@ -938,12 +949,105 @@ def form_thr_string(thr): "FSL-AFNI-brain-mask", "FSL-AFNI-brain-probseg", ], - outputs=["space-bold_desc-brain_mask", "desc-ref_bold"], + outputs={ + "space-bold_desc-brain_mask": { + "Description": "mask of the skull-stripped input file" + }, + "desc-ref_bold": { + "Description": "the ``bias_corrected_file`` after skull-stripping" + }, + }, ) def bold_mask_fsl_afni(wf, cfg, strat_pool, pipe_num, opt=None): - """fMRIPrep-style BOLD mask - `Ref `_ + """fMRIPrep-style BOLD mask. + + Enhance and run brain extraction on a BOLD EPI image. + + This workflow takes in a :abbr:`BOLD (blood-oxygen level-dependant)` + :abbr:`fMRI (functional MRI)` average/summary (e.g., a reference image + averaging non-steady-state timepoints), and sharpens the histogram + with the application of the N4 algorithm for removing the + :abbr:`INU (intensity non-uniformity)` bias field and calculates a signal + mask. + + Steps of this workflow are: + + [1]. Binary dilation of the tentative mask with a sphere of 3mm diameter. + [2]. Run ANTs' ``N4BiasFieldCorrection`` on the input + :abbr:`BOLD (blood-oxygen level-dependant)` average, using the + mask generated in 1) instead of the internal Otsu thresholding. + [3]. Calculate a loose mask using FSL's ``bet``, with one mathematical morphology + dilation of one iteration and a sphere of 6mm as structuring element. + [4]. Mask the :abbr:`INU (intensity non-uniformity)`-corrected image + with the latest mask calculated in 3), then use AFNI's ``3dUnifize`` + to *standardize* the T2* contrast distribution. + [5]. Calculate a mask using AFNI's ``3dAutomask`` after the contrast + enhancement of 4). + [6]. Calculate a final mask as the intersection of 4) and 6). + [7]. Apply final mask on the enhanced reference. + + `Ref `_. """ + # STATEMENT OF CHANGES: + # This function is derived from sources licensed under the Apache-2.0 terms, + # and this function has been changed. + + # CHANGES: + # * Converted from a plain function to a CPAC.pipeline.nodeblock.NodeBlockFunction + # * Removed Registration version check + # * Hardcoded Registration parameters instead of loading epi_atlasbased_brainmask.json + # * Uses C-PAC's ``FSL-AFNI-brain-probseg`` template in place of ``templateflow.api.get("MNI152NLin2009cAsym", resolution=1, label="brain", suffix="probseg")`` + # * Replaced niworkflows.interfaces.nibabel.Binarize with fsl.maths.MathsCommand and hardcoded threshold + # * Replaced niworkflows.interfaces.images.MatchHeader with CPAC.utils.interfaces.ants.(PrintHeader and SetDirectionByMatrix) + # * Removed header fix for unifize + # * Removed header fix for skullstrip_second_pass + # * Removed ``if not pre_mask`` conditional block + # * Modified docstring to reflect local changes + # * Refactored some variables and connections and updated style to match C-PAC codebase + + # ORIGINAL WORK'S ATTRIBUTION NOTICE: + # Copyright (c) 2016, the CRN developers team. + # All rights reserved. + + # Redistribution and use in source and binary forms, with or without + # modification, are permitted provided that the following conditions are met: + + # * Redistributions of source code must retain the above copyright notice, this + # list of conditions and the following disclaimer. 
+
+    # * Redistributions in binary form must reproduce the above copyright notice,
+    #   this list of conditions and the following disclaimer in the documentation
+    #   and/or other materials provided with the distribution.
+
+    # * Neither the name of niworkflows nor the names of its
+    #   contributors may be used to endorse or promote products derived from
+    #   this software without specific prior written permission.
+
+    # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+    # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+    # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+    # DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+    # FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+    # DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+    # SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+    # CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+    # OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+    # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+    # Licensed under the Apache License, Version 2.0 (the "License");
+    # you may not use this file except in compliance with the License.
+    # You may obtain a copy of the License at
+
+    # http://www.apache.org/licenses/LICENSE-2.0
+
+    # Unless required by applicable law or agreed to in writing, software
+    # distributed under the License is distributed on an "AS IS" BASIS,
+    # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+    # See the License for the specific language governing permissions and
+    # limitations under the License.
+
+    # Modifications copyright (C) 2021 - 2024 C-PAC Developers
+
     # Initialize transforms with antsAI
     init_aff = pe.Node(
         AI(
@@ -1040,6 +1144,7 @@ def bold_mask_fsl_afni(wf, cfg, strat_pool, pipe_num, opt=None):
         n_procs=1,
     )
 
+    # Create a generous BET mask out of the bias-corrected EPI
     skullstrip_first_pass = pe.Node(
         fsl.BET(frac=0.2, mask=True, functional=False),
         name=f"skullstrip_first_pass_{pipe_num}",
@@ -1055,8 +1160,9 @@ def bold_mask_fsl_afni(wf, cfg, strat_pool, pipe_num, opt=None):
         name=f"skullstrip_first_dilate_{pipe_num}",
     )
 
-    bet_mask = pe.Node(fsl.ApplyMask(), name=f"skullstrip_first_mask_" f"{pipe_num}")
+    bet_mask = pe.Node(fsl.ApplyMask(), name=f"skullstrip_first_mask_{pipe_num}")
 
+    # Use AFNI's unifize for T2 contrast
     unifize = pe.Node(
         afni_utils.Unifize(
             t2=True,
@@ -1067,15 +1173,18 @@ def bold_mask_fsl_afni(wf, cfg, strat_pool, pipe_num, opt=None):
         name=f"unifize_{pipe_num}",
     )
 
+    # Run AFNI's 3dAutomask to extract a refined brain mask
     skullstrip_second_pass = pe.Node(
         preprocess.Automask(dilate=1, outputtype="NIFTI_GZ"),
         name=f"skullstrip_second_pass_{pipe_num}",
     )
 
+    # Take intersection of both masks
    combine_masks = pe.Node(
         fsl.BinaryMaths(operation="mul"), name=f"combine_masks_{pipe_num}"
     )
 
+    # Compute masked brain
     apply_mask = pe.Node(fsl.ApplyMask(), name=f"extract_ref_brain_bold_{pipe_num}")
 
     node, out = strat_pool.get_data(["motion-basefile"])
@@ -1140,6 +1249,7 @@ def bold_mask_fsl_afni(wf, cfg, strat_pool, pipe_num, opt=None):
     outputs=["space-bold_desc-brain_mask"],
 )
 def bold_mask_anatomical_refined(wf, cfg, strat_pool, pipe_num, opt=None):
+    """Generate the BOLD mask by basing it off of the refined anatomical brain mask."""
     # binarize anat mask, in case it is not a binary mask.
anat_brain_mask_bin = pe.Node( interface=fsl.ImageMaths(), name=f"anat_brain_mask_bin_{pipe_num}" @@ -1180,7 +1290,7 @@ def bold_mask_anatomical_refined(wf, cfg, strat_pool, pipe_num, opt=None): mem_x=(0.0115, "in_file", "t"), ) - func_reorient.inputs.orientation = "RPI" + func_reorient.inputs.orientation = cfg.pipeline_setup["desired_orientation"] func_reorient.inputs.outputtype = "NIFTI_GZ" wf.connect(func_deoblique, "out_file", func_reorient, "in_file") @@ -1206,7 +1316,7 @@ def bold_mask_anatomical_refined(wf, cfg, strat_pool, pipe_num, opt=None): # refined_bold_mask : input motion corrected func refined_bold_mask = anat_refined_mask( - init_bold_mask=False, wf_name="refined_bold_mask" f"_{pipe_num}" + init_bold_mask=False, wf_name=f"refined_bold_mask_{pipe_num}" ) node, out = strat_pool.get_data(["desc-preproc_bold", "bold"]) @@ -1284,7 +1394,8 @@ def bold_mask_anatomical_refined(wf, cfg, strat_pool, pipe_num, opt=None): ) def bold_mask_anatomical_based(wf, cfg, strat_pool, pipe_num, opt=None): """Generate the BOLD mask by basing it off of the anatomical brain mask. - Adapted from `DCAN Lab's BOLD mask method from the ABCD pipeline `_. + + Adapted from `DCAN Lab's BOLD mask method from the ABCD pipeline `_. """ # 0. Take single volume of func func_single_volume = pe.Node(interface=afni.Calc(), name="func_single_volume") @@ -1452,7 +1563,7 @@ def anat_brain_mask_to_bold_res(wf_name, cfg, pipe_num): "desc-preproc_bold", "T1w-template-funcreg", "space-template_desc-preproc_T1w", - "space-template_desc-brain_mask", + "space-template_desc-T1w_mask", ], outputs=[ "space-template_res-bold_desc-brain_T1w", @@ -1461,8 +1572,9 @@ def anat_brain_mask_to_bold_res(wf_name, cfg, pipe_num): ], ) def bold_mask_anatomical_resampled(wf, cfg, strat_pool, pipe_num, opt=None): - """Resample anatomical brain mask in standard space to get BOLD brain mask in standard space - Adapted from `DCAN Lab's BOLD mask method from the ABCD pipeline `_. + """Resample anatomical brain mask to get BOLD brain mask in standard space. + + Adapted from `DCAN Lab's BOLD mask method from the ABCD pipeline `_. """ anat_brain_to_func_res = anat_brain_to_bold_res(wf, cfg, pipe_num) @@ -1480,7 +1592,7 @@ def bold_mask_anatomical_resampled(wf, cfg, strat_pool, pipe_num, opt=None): wf_name="anat_brain_mask_to_bold_res", cfg=cfg, pipe_num=pipe_num ) - node, out = strat_pool.get_data("space-template_desc-brain_mask") + node, out = strat_pool.get_data("space-template_desc-T1w_mask") wf.connect( node, out, anat_brain_mask_to_func_res, "inputspec.space-template_desc-T1w_mask" ) @@ -1544,7 +1656,8 @@ def bold_mask_anatomical_resampled(wf, cfg, strat_pool, pipe_num, opt=None): ) def bold_mask_ccs(wf, cfg, strat_pool, pipe_num, opt=None): """Generate the BOLD mask by basing it off of the anatomical brain. - Adapted from `the BOLD mask method from the CCS pipeline `_. + + Adapted from `the BOLD mask method from the CCS pipeline `_. 
""" # Run 3dAutomask to generate func initial mask func_tmp_brain_mask = pe.Node( @@ -1680,6 +1793,7 @@ def bold_mask_ccs(wf, cfg, strat_pool, pipe_num, opt=None): }, ) def bold_masking(wf, cfg, strat_pool, pipe_num, opt=None): + """Generate a functional brain mask.""" func_edge_detect = pe.Node( interface=afni_utils.Calc(), name=f"func_extract_brain_{pipe_num}" ) @@ -1711,6 +1825,7 @@ def bold_masking(wf, cfg, strat_pool, pipe_num, opt=None): outputs=["desc-mean_bold"], ) def func_mean(wf, cfg, strat_pool, pipe_num, opt=None): + """Generate a mean functional image.""" func_mean = pe.Node(interface=afni_utils.TStat(), name=f"func_mean_{pipe_num}") func_mean.inputs.options = "-mean" @@ -1734,6 +1849,7 @@ def func_mean(wf, cfg, strat_pool, pipe_num, opt=None): outputs=["desc-preproc_bold"], ) def func_normalize(wf, cfg, strat_pool, pipe_num, opt=None): + """Normalize a functional image.""" func_normalize = pe.Node( interface=fsl.ImageMaths(), name=f"func_normalize_{pipe_num}", @@ -1759,6 +1875,7 @@ def func_normalize(wf, cfg, strat_pool, pipe_num, opt=None): outputs=["space-bold_desc-brain_mask"], ) def func_mask_normalize(wf, cfg, strat_pool, pipe_num, opt=None): + """Normalize a functional mask.""" func_mask_normalize = pe.Node( interface=fsl.ImageMaths(), name=f"func_mask_normalize_{pipe_num}", diff --git a/CPAC/longitudinal_pipeline/longitudinal_workflow.py b/CPAC/longitudinal_pipeline/longitudinal_workflow.py index 9b2c389a09..5c989675c1 100644 --- a/CPAC/longitudinal_pipeline/longitudinal_workflow.py +++ b/CPAC/longitudinal_pipeline/longitudinal_workflow.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright (C) 2020-2022 C-PAC Developers +# Copyright (C) 2020-2024 C-PAC Developers # This file is part of C-PAC. @@ -17,7 +17,6 @@ # License along with C-PAC. If not, see . import os -from nipype import logging from nipype.interfaces import fsl import nipype.interfaces.io as nio from indi_aws import aws_utils @@ -47,8 +46,6 @@ from CPAC.utils.strategy import Strategy from CPAC.utils.utils import check_config_resources, check_prov_for_regtool -logger = logging.getLogger("nipype.workflow") - @nodeblock( name="mask_T1w_longitudinal_template", @@ -60,7 +57,7 @@ def mask_T1w_longitudinal_template(wf, cfg, strat_pool, pipe_num, opt=None): brain_mask = pe.Node( interface=fsl.maths.MathsCommand(), - name=f"longitudinal_anatomical_brain_mask_" f"{pipe_num}", + name=f"longitudinal_anatomical_brain_mask_{pipe_num}", ) brain_mask.inputs.args = "-bin" @@ -79,9 +76,8 @@ def create_datasink( session_id="", strat_name="", map_node_iterfield=None, -): +) -> pe.Node | pe.MapNode: """ - Parameters ---------- datasink_name @@ -90,15 +86,8 @@ def create_datasink( session_id strat_name map_node_iterfield - - Returns - ------- - """ - try: - encrypt_data = bool(config.pipeline_setup["Amazon-AWS"]["s3_encryption"]) - except: - encrypt_data = False + encrypt_data = config.pipeline_setup["Amazon-AWS"]["s3_encryption"] # TODO Enforce value with schema validation # Extract credentials path for output if it exists @@ -122,7 +111,8 @@ def create_datasink( ) if not s3_write_access: - raise Exception("Not able to write to bucket!") + msg = "Not able to write to bucket!" 
+ raise Exception(msg) except Exception as e: if ( @@ -360,7 +350,7 @@ def warp_longitudinal_seg_to_T1w(wf, cfg, strat_pool, pipe_num, opt=None): for label in labels: apply_xfm = apply_transform( - f"warp_longitudinal_seg_to_T1w_{label}_" f"{pipe_num}", + f"warp_longitudinal_seg_to_T1w_{label}_{pipe_num}", reg_tool, time_series=False, num_cpus=num_cpus, @@ -630,7 +620,7 @@ def anat_longitudinal_wf(subject_id, sub_list, config): ) rpool.set_data( - "from-T1w_to-longitudinal_mode-image_" "desc-linear_xfm", + "from-T1w_to-longitudinal_mode-image_desc-linear_xfm", select_sess, "warp_path", {}, @@ -722,9 +712,9 @@ def func_preproc_longitudinal_wf(subject_id, sub_list, config): for sub_dict in sub_list: if "func" in sub_dict or "rest" in sub_dict: if "func" in sub_dict: - func_paths_dict = sub_dict["func"] + sub_dict["func"] else: - func_paths_dict = sub_dict["rest"] + sub_dict["rest"] unique_id = sub_dict["unique_id"] session_id_list.append(unique_id) @@ -832,9 +822,12 @@ def merge_func_preproc(working_directory): def register_func_longitudinal_template_to_standard( longitudinal_template_node, c, workflow, strat_init, strat_name ): - sub_mem_gb, num_cores_per_sub, num_ants_cores, num_omp_cores = ( - check_config_resources(c) - ) + ( + sub_mem_gb, + num_cores_per_sub, + num_ants_cores, + num_omp_cores, + ) = check_config_resources(c) strat_init_new = strat_init.fork() @@ -1211,6 +1204,7 @@ def func_longitudinal_template_wf(subject_id, strat_list, config): resampled_template.inputs.template = template resampled_template.inputs.template_name = template_name resampled_template.inputs.tag = tag + resampled_template.inputs.orientation = config["desired_orientation"] strat_init.update_resource_pool( {template_name: (resampled_template, "resampled_template")} diff --git a/CPAC/pipeline/engine.py b/CPAC/pipeline/engine.py index 3b7451d8b8..be1d0c0c17 100644 --- a/CPAC/pipeline/engine.py +++ b/CPAC/pipeline/engine.py @@ -1,4 +1,4 @@ -# Copyright (C) 2021-2023 C-PAC Developers +# Copyright (C) 2021-2024 C-PAC Developers # This file is part of C-PAC. 
@@ -19,31 +19,32 @@ import hashlib from itertools import chain import json -import logging import os import re -from typing import Optional, Union +from typing import Optional import warnings -from nipype import config # pylint: disable=wrong-import-order -from nipype.interfaces.utility import Rename # pylint: disable=wrong-import-order +from nipype import config, logging +from nipype.interfaces import afni +from nipype.interfaces.utility import Rename from CPAC.image_utils.spatial_smoothing import spatial_smoothing from CPAC.image_utils.statistical_transforms import ( fisher_z_score_standardize, z_score_standardize, ) -from CPAC.pipeline import ( - nipype_pipeline_engine as pe, # pylint: disable=ungrouped-imports -) +from CPAC.pipeline import nipype_pipeline_engine as pe from CPAC.pipeline.check_outputs import ExpectedOutputs -from CPAC.pipeline.nodeblock import ( - NodeBlockFunction, # pylint: disable=ungrouped-imports +from CPAC.pipeline.nodeblock import NodeBlockFunction +from CPAC.pipeline.utils import ( + MOVEMENT_FILTER_KEYS, + name_fork, + source_set, ) -from CPAC.pipeline.utils import MOVEMENT_FILTER_KEYS, name_fork, source_set from CPAC.registration.registration import transform_derivative from CPAC.resources.templates.lookup_table import lookup_identifier from CPAC.utils.bids_utils import res_in_filename +from CPAC.utils.configuration import Configuration from CPAC.utils.datasource import ( create_anat_datasource, create_func_datasource, @@ -53,9 +54,13 @@ ) from CPAC.utils.interfaces.datasink import DataSink from CPAC.utils.interfaces.function import Function -from CPAC.utils.monitoring import getLogger, LOGTAIL, WARNING_FREESURFER_OFF_WITH_DATA +from CPAC.utils.monitoring import ( + getLogger, + LOGTAIL, + WARNING_FREESURFER_OFF_WITH_DATA, + WFLOGGER, +) from CPAC.utils.outputs import Outputs -from CPAC.utils.typing import LIST_OR_STR, TUPLE from CPAC.utils.utils import ( check_prov_for_regtool, create_id_string, @@ -64,8 +69,6 @@ write_output_json, ) -logger = getLogger("nipype.workflow") - class ResourcePool: def __init__(self, rpool=None, name=None, cfg=None, pipe_list=None): @@ -149,7 +152,7 @@ def append_name(self, name): def back_propogate_template_name( self, wf, resource_idx: str, json_info: dict, id_string: "pe.Node" ) -> None: - """Find and apply the template name from a resource's provenance + """Find and apply the template name from a resource's provenance. 
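+
+        Walk the resource's ``CpacProvenance`` back to its source entries and,
+        when a template source is found, apply that template's name (and its
+        ``Description``, if present) to the given ``id_string`` node.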
Parameters ---------- @@ -179,9 +182,9 @@ def back_propogate_template_name( # a different space, so don't use it as the space for # descendents try: - anscestor_json = list(self.rpool.get(source).items())[0][1].get( - "json", {} - ) + anscestor_json = next(iter(self.rpool.get(source).items()))[ + 1 + ].get("json", {}) if "Description" in anscestor_json: id_string.inputs.template_desc = anscestor_json[ "Description" @@ -227,7 +230,7 @@ def copy_rpool(self): @staticmethod def get_raw_label(resource: str) -> str: - """Removes ``desc-*`` label""" + """Remove ``desc-*`` label.""" for tag in resource.split("_"): if "desc-" in tag: resource = resource.replace(f"{tag}_", "") @@ -244,7 +247,9 @@ def get_strat_info(self, prov, label=None, logdir=None): if label: if not logdir: logdir = self.logdir - print(f"\n\nPrinting out strategy info for {label} in {logdir}\n") + WFLOGGER.info( + "\n\nPrinting out strategy info for %s in %s\n", label, logdir + ) write_output_json( strat_info, f"{label}_strat_info", indent=4, basedir=logdir ) @@ -252,15 +257,15 @@ def get_strat_info(self, prov, label=None, logdir=None): def set_json_info(self, resource, pipe_idx, key, val): # TODO: actually should probably be able to inititialize resource/pipe_idx if pipe_idx not in self.rpool[resource]: - raise Exception( + msg = ( "\n[!] DEV: The pipeline/strat ID does not exist " f"in the resource pool.\nResource: {resource}" f"Pipe idx: {pipe_idx}\nKey: {key}\nVal: {val}\n" ) - else: - if "json" not in self.rpool[resource][pipe_idx]: - self.rpool[resource][pipe_idx]["json"] = {} - self.rpool[resource][pipe_idx]["json"][key] = val + raise Exception(msg) + if "json" not in self.rpool[resource][pipe_idx]: + self.rpool[resource][pipe_idx]["json"] = {} + self.rpool[resource][pipe_idx]["json"][key] = val def get_json_info(self, resource, pipe_idx, key): # TODO: key checks @@ -279,12 +284,14 @@ def get_resource_from_prov(prov): return None if isinstance(prov[-1], list): return prov[-1][-1].split(":")[0] - elif isinstance(prov[-1], str): + if isinstance(prov[-1], str): return prov[-1].split(":")[0] + return None def regressor_dct(self, cfg) -> dict: - """Returns the regressor dictionary for the current strategy if - one exists. Raises KeyError otherwise. + """Return the regressor dictionary for the current strategy if one exists. + + Raises KeyError otherwise. """ # pylint: disable=attribute-defined-outside-init if hasattr(self, "_regressor_dct"): # memoized @@ -296,7 +303,6 @@ def regressor_dct(self, cfg) -> dict: "ingress_regressors." ) _nr = cfg["nuisance_corrections", "2-nuisance_regression"] - if not hasattr(self, "timeseries"): if _nr["Regressors"]: self.regressors = {reg["Name"]: reg for reg in _nr["Regressors"]} @@ -341,15 +347,18 @@ def set_data( try: res, new_pipe_idx = self.generate_prov_string(new_prov_list) except IndexError: - raise IndexError( + msg = ( f"\n\nThe set_data() call for {resource} has no " "provenance information and should not be an " "injection." 
) + raise IndexError(msg) if not json_info: json_info = { - "RawSources": [resource] - } # <---- this will be repopulated to the full file path at the end of the pipeline building, in gather_pipes() + "RawSources": [ + resource # <---- this will be repopulated to the full file path at the end of the pipeline building, in gather_pipes() + ] + } json_info["CpacProvenance"] = new_prov_list if resource not in self.rpool.keys(): @@ -357,9 +366,8 @@ def set_data( elif not fork: # <--- in the event of multiple strategies/options, this will run for every option; just keep in mind search = False if self.get_resource_from_prov(current_prov_list) == resource: - pipe_idx = self.generate_prov_string(current_prov_list)[ - 1 - ] # CHANGING PIPE_IDX, BE CAREFUL DOWNSTREAM IN THIS FUNCTION + # CHANGING PIPE_IDX, BE CAREFUL DOWNSTREAM IN THIS FUNCTION + pipe_idx = self.generate_prov_string(current_prov_list)[1] if pipe_idx not in self.rpool[resource].keys(): search = True else: @@ -368,22 +376,15 @@ def set_data( for idx in current_prov_list: if self.get_resource_from_prov(idx) == resource: if isinstance(idx, list): - pipe_idx = self.generate_prov_string( - idx - )[ - 1 - ] # CHANGING PIPE_IDX, BE CAREFUL DOWNSTREAM IN THIS FUNCTION + # CHANGING PIPE_IDX, BE CAREFUL DOWNSTREAM IN THIS FUNCTION + pipe_idx = self.generate_prov_string(idx)[1] elif isinstance(idx, str): pipe_idx = idx break - if ( - pipe_idx in self.rpool[resource].keys() - ): # <--- in case the resource name is now new, and not the original - del self.rpool[ - resource - ][ - pipe_idx - ] # <--- remove old keys so we don't end up with a new strat for every new node unit (unless we fork) + if pipe_idx in self.rpool[resource].keys(): + # in case the resource name is now new, and not the original + # remove old keys so we don't end up with a new strat for every new node unit (unless we fork) + del self.rpool[resource][pipe_idx] if new_pipe_idx not in self.rpool[resource]: self.rpool[resource][new_pipe_idx] = {} if new_pipe_idx not in self.pipe_list: @@ -394,18 +395,17 @@ def set_data( def get( self, - resource: LIST_OR_STR, + resource: list[str] | str, pipe_idx: Optional[str] = None, report_fetched: Optional[bool] = False, optional: Optional[bool] = False, - ) -> Union[TUPLE[Optional[dict], Optional[str]], Optional[dict]]: + ) -> tuple[Optional[dict], Optional[str]] | Optional[dict]: # NOTE!!! - # if this is the main rpool, this will return a dictionary of strats, and inside those, are dictionaries like {'data': (node, out), 'json': info} - # BUT, if this is a sub rpool (i.e. a strat_pool), this will return a one-level dictionary of {'data': (node, out), 'json': info} WITHOUT THE LEVEL OF STRAT KEYS ABOVE IT + # if this is the main rpool, this will return a dictionary of strats, and inside those, are dictionaries like {'data': (node, out), 'json': info} + # BUT, if this is a sub rpool (i.e. a strat_pool), this will return a one-level dictionary of {'data': (node, out), 'json': info} WITHOUT THE LEVEL OF STRAT KEYS ABOVE IT if not isinstance(resource, list): resource = [resource] - # if a list of potential inputs are given, pick the first one - # found + # if a list of potential inputs are given, pick the first one found for label in resource: if label in self.rpool.keys(): _found = self.rpool[label] @@ -418,7 +418,7 @@ def get( if report_fetched: return (None, None) return None - raise LookupError( + msg = ( "\n\n[!] 
C-PAC says: None of the listed resources are in " f"the resource pool:\n\n {resource}\n\nOptions:\n- You " "can enable a node block earlier in the pipeline which " @@ -432,6 +432,7 @@ def get( "through any of our support channels at: " "https://fcp-indi.github.io/\n" ) + raise LookupError(msg) def get_data( self, resource, pipe_idx=None, report_fetched=False, quick_single=False @@ -444,10 +445,10 @@ def get_data( return (connect["data"], fetched) connect, fetched = self.get(resource, report_fetched=report_fetched) return (connect["data"], fetched) - elif pipe_idx: + if pipe_idx: return self.get(resource, pipe_idx=pipe_idx)["data"] - elif quick_single or len(self.get(resource)) == 1: - for key, val in self.get(resource).items(): + if quick_single or len(self.get(resource)) == 1: + for _key, val in self.get(resource).items(): return val["data"] return self.get(resource)["data"] @@ -455,7 +456,8 @@ def copy_resource(self, resource, new_name): try: self.rpool[new_name] = self.rpool[resource] except KeyError: - raise Exception(f"[!] {resource} not in the resource pool.") + msg = f"[!] {resource} not in the resource pool." + raise Exception(msg) def update_resource(self, resource, new_name): # move over any new pipe_idx's @@ -478,11 +480,12 @@ def get_json(self, resource, strat=None): if "json" in resource_strat_dct: strat_json = resource_strat_dct["json"] else: - raise Exception( + msg = ( "\n[!] Developer info: the JSON " f"information for {resource} and {strat} " f"is incomplete.\n" ) + raise Exception(msg) return strat_json def get_cpac_provenance(self, resource, strat=None): @@ -501,12 +504,13 @@ def get_cpac_provenance(self, resource, strat=None): def generate_prov_string(prov): # this will generate a string from a SINGLE RESOURCE'S dictionary of # MULTIPLE PRECEDING RESOURCES (or single, if just one) - # NOTE: this DOES NOT merge multiple resources!!! (i.e. for merging-strat pipe_idx generation) + # NOTE: this DOES NOT merge multiple resources!!! (i.e. for merging-strat pipe_idx generation) if not isinstance(prov, list): - raise Exception( + msg = ( "\n[!] Developer info: the CpacProvenance " f"entry for {prov} has to be a list.\n" ) + raise TypeError(msg) last_entry = get_last_prov_entry(prov) resource = last_entry.split(":")[0] return (resource, str(prov)) @@ -514,10 +518,11 @@ def generate_prov_string(prov): @staticmethod def generate_prov_list(prov_str): if not isinstance(prov_str, str): - raise Exception( + msg = ( "\n[!] 
Developer info: the CpacProvenance " f"entry for {prov_str!s} has to be a string.\n" ) + raise TypeError(msg) return ast.literal_eval(prov_str) @staticmethod @@ -544,7 +549,7 @@ def get_resource_strats_from_prov(prov): def flatten_prov(self, prov): if isinstance(prov, str): return [prov] - elif isinstance(prov, list): + if isinstance(prov, list): flat_prov = [] for entry in prov: if isinstance(entry, list): @@ -552,6 +557,7 @@ def flatten_prov(self, prov): else: flat_prov.append(entry) return flat_prov + return None def get_strats(self, resources, debug=False): # TODO: NOTE: NOT COMPATIBLE WITH SUB-RPOOL/STRAT_POOLS @@ -562,7 +568,7 @@ def get_strats(self, resources, debug=False): linked_resources = [] resource_list = [] if debug: - verbose_logger = getLogger("engine") + verbose_logger = getLogger("CPAC.engine") verbose_logger.debug("\nresources: %s", resources) for resource in resources: # grab the linked-input tuples @@ -576,7 +582,7 @@ def get_strats(self, resources, debug=False): continue linked.append(fetched_resource) resource_list += linked - if len(linked) < 2: + if len(linked) < 2: # noqa: PLR2004 continue linked_resources.append(linked) else: @@ -586,15 +592,18 @@ def get_strats(self, resources, debug=False): variant_pool = {} len_inputs = len(resource_list) if debug: - verbose_logger = getLogger("engine") + verbose_logger = getLogger("CPAC.engine") verbose_logger.debug("linked_resources: %s", linked_resources) verbose_logger.debug("resource_list: %s", resource_list) for resource in resource_list: - rp_dct, fetched_resource = self.get( + ( + rp_dct, # <---- rp_dct has the strats/pipe_idxs as the keys on first level, then 'data' and 'json' on each strat level underneath + fetched_resource, + ) = self.get( resource, - report_fetched=True, # <---- rp_dct has the strats/pipe_idxs as the keys on first level, then 'data' and 'json' on each strat level underneath - optional=True, - ) # oh, and we make the resource fetching in get_strats optional so we can have optional inputs, but they won't be optional in the node block unless we want them to be + report_fetched=True, + optional=True, # oh, and we make the resource fetching in get_strats optional so we can have optional inputs, but they won't be optional in the node block unless we want them to be + ) if not rp_dct: len_inputs -= 1 continue @@ -614,7 +623,7 @@ def get_strats(self, resources, debug=False): variant_pool[fetched_resource].append(f"NO-{val[0]}") if debug: - verbose_logger = getLogger("engine") + verbose_logger = getLogger("CPAC.engine") verbose_logger.debug("%s sub_pool: %s\n", resource, sub_pool) total_pool.append(sub_pool) @@ -652,7 +661,7 @@ def get_strats(self, resources, debug=False): strat_list_list.append(strat_list) if debug: - verbose_logger = getLogger("engine") + verbose_logger = getLogger("CPAC.engine") verbose_logger.debug("len(strat_list_list): %s\n", len(strat_list_list)) for strat_list in strat_list_list: json_dct = {} @@ -742,10 +751,8 @@ def get_strats(self, resources, debug=False): # make the merged strat label from the multiple inputs # strat_list is actually the merged CpacProvenance lists pipe_idx = str(strat_list) - new_strats[pipe_idx] = ( - ResourcePool() - ) # <----- new_strats is A DICTIONARY OF RESOURCEPOOL OBJECTS! - + new_strats[pipe_idx] = ResourcePool() + # new_strats is A DICTIONARY OF RESOURCEPOOL OBJECTS! 
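+                # illustrative sketch (hypothetical, abridged key):
+                #     new_strats["[['desc-preproc_T1w:anatomical_init', ...]]"]
+                # maps each merged-CpacProvenance string to a fresh one-strat
+                # slice of the resource pool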
# placing JSON info at one level higher only for copy convenience new_strats[pipe_idx].rpool["json"] = {} new_strats[pipe_idx].rpool["json"]["subjson"] = {} @@ -754,12 +761,10 @@ def get_strats(self, resources, debug=False): # now just invert resource:strat to strat:resource for each resource:strat for cpac_prov in strat_list: resource, strat = self.generate_prov_string(cpac_prov) - resource_strat_dct = self.rpool[resource][ - strat - ] # <----- remember, this is the dct of 'data' and 'json'. - new_strats[pipe_idx].rpool[resource] = ( - resource_strat_dct # <----- new_strats is A DICTIONARY OF RESOURCEPOOL OBJECTS! each one is a new slice of the resource pool combined together. - ) + resource_strat_dct = self.rpool[resource][strat] + # remember, `resource_strat_dct` is the dct of 'data' and 'json'. + new_strats[pipe_idx].rpool[resource] = resource_strat_dct + # `new_strats` is A DICTIONARY OF RESOURCEPOOL OBJECTS! each one is a new slice of the resource pool combined together. self.pipe_list.append(pipe_idx) if "CpacVariant" in resource_strat_dct["json"]: if "CpacVariant" not in new_strats[pipe_idx].rpool["json"]: @@ -783,21 +788,18 @@ def get_strats(self, resources, debug=False): ) else: new_strats = {} - for resource_strat_list in ( - total_pool - ): # total_pool will have only one list of strats, for the one input + for resource_strat_list in total_pool: + # total_pool will have only one list of strats, for the one input for cpac_prov in resource_strat_list: # <------- cpac_prov here doesn't need to be modified, because it's not merging with other inputs resource, pipe_idx = self.generate_prov_string(cpac_prov) - resource_strat_dct = self.rpool[resource][ - pipe_idx - ] # <----- remember, this is the dct of 'data' and 'json'. + resource_strat_dct = self.rpool[resource][pipe_idx] + # remember, `resource_strat_dct` is the dct of 'data' and 'json'. new_strats[pipe_idx] = ResourcePool( rpool={resource: resource_strat_dct} ) # <----- again, new_strats is A DICTIONARY OF RESOURCEPOOL OBJECTS! # placing JSON info at one level higher only for copy convenience - new_strats[pipe_idx].rpool["json"] = resource_strat_dct[ - "json" - ] # TODO: WARNING- THIS IS A LEVEL HIGHER THAN THE ORIGINAL 'JSON' FOR EASE OF ACCESS IN CONNECT_BLOCK WITH THE .GET(JSON) + new_strats[pipe_idx].rpool["json"] = resource_strat_dct["json"] + # TODO: WARNING- THIS IS A LEVEL HIGHER THAN THE ORIGINAL 'JSON' FOR EASE OF ACCESS IN CONNECT_BLOCK WITH THE .GET(JSON) new_strats[pipe_idx].rpool["json"]["subjson"] = {} new_strats[pipe_idx].rpool["json"]["CpacProvenance"] = cpac_prov # preserve each input's JSON info also @@ -813,8 +815,7 @@ def derivative_xfm(self, wf, label, connection, json_info, pipe_idx, pipe_x): if label in self.xfm: json_info = dict(json_info) - # get the bold-to-template transform from the current strat_pool - # info + # get the bold-to-template transform from the current strat_pool info xfm_idx = None xfm_label = "from-bold_to-template_mode-image_xfm" for entry in json_info["CpacProvenance"]: @@ -880,7 +881,7 @@ def derivative_xfm(self, wf, label, connection, json_info, pipe_idx, pipe_x): @property def filtered_movement(self) -> bool: """ - Check if the movement parameters have been filtered in this strat_pool + Check if the movement parameters have been filtered in this strat_pool. 
 
         Returns
         -------
@@ -894,14 +895,11 @@ def filtered_movement(self) -> bool:
             # not a strat_pool or no movement parameters in strat_pool
             return False

-    def filter_name(self, cfg) -> str:
+    def filter_name(self, cfg: Configuration) -> str:
         """
-        In a strat_pool with filtered movement parameters, return the
-        name of the filter for this strategy
+        Return the name of the filter for this strategy.

-        Returns
-        -------
-        str
+        Applicable only in a strat_pool with filtered movement parameters.
         """
         motion_filters = cfg[
             "functional_preproc",
@@ -958,7 +956,7 @@ def post_process(self, wf, label, connection, json_info, pipe_idx, pipe_x, outs)
         if label in Outputs.to_smooth:
             for smooth_opt in self.smooth_opts:
                 sm = spatial_smoothing(
-                    f"{label}_smooth_{smooth_opt}_" f"{pipe_x}",
+                    f"{label}_smooth_{smooth_opt}_{pipe_x}",
                     self.fwhm,
                     input_type,
                     smooth_opt,
@@ -1052,8 +1050,7 @@ def post_process(self, wf, label, connection, json_info, pipe_idx, pipe_x, outs)
                 connection[0], connection[1], zstd, "inputspec.correlation_file"
             )

-            # if the output is 'space-template_desc-MeanSCA_correlations', we want
-            # 'desc-MeanSCA_timeseries'
+            # if the output is 'space-template_desc-MeanSCA_correlations', we want 'desc-MeanSCA_timeseries'
             oned = label.replace("correlations", "timeseries")

             node, out = outs[oned]
@@ -1187,12 +1184,12 @@ def gather_pipes(self, wf, cfg, all=False, add_incl=None, add_excl=None):
                 self.rpool[resource][pipe_idx]["json"]
                 for pipe_idx in self.rpool[resource]
             ]
-            unlabelled = set(
+            unlabelled = {
                 key
                 for json_info in all_jsons
                 for key in json_info.get("CpacVariant", {}).keys()
                 if key not in (*MOVEMENT_FILTER_KEYS, "timeseries")
-            )
+            }
             if "bold" in unlabelled:
                 all_bolds = list(
                     chain.from_iterable(
@@ -1205,7 +1202,7 @@ def gather_pipes(self, wf, cfg, all=False, add_incl=None, add_excl=None):
                 # not any(not) because all is overloaded as a parameter here
                 if not any(
                     not re.match(
-                        r"apply_(phasediff|blip)_to_" r"timeseries_separately_.*", _bold
+                        r"apply_(phasediff|blip)_to_timeseries_separately_.*", _bold
                     )
                     for _bold in all_bolds
                 ):
@@ -1225,7 +1222,8 @@ def gather_pipes(self, wf, cfg, all=False, add_incl=None, add_excl=None):
             }
             # del all_jsons
             for key, forks in all_forks.items():
-                if len(forks) < 2:  # no int suffix needed if only one fork
+                if len(forks) < 2:  # noqa: PLR2004
+                    # no int suffix needed if only one fork
                     unlabelled.remove(key)
             # del all_forks
         for pipe_idx in self.rpool[resource]:
@@ -1322,7 +1320,7 @@ def gather_pipes(self, wf, cfg, all=False, add_incl=None, add_excl=None):
                     # need the single quote and the colon inside the double
                     # quotes - it's the encoded pipe_idx
                     # atlas_idx = new_idx.replace(f"'{temp_rsc}:",
-                    # "'atlas_name:")
+                    #                             "'atlas_name:")
                     if atlas_idx in self.rpool["atlas_name"]:
                         node, out = self.rpool["atlas_name"][atlas_idx]["data"]
                         wf.connect(node, out, id_string, "atlas_id")
@@ -1340,7 +1338,7 @@ def gather_pipes(self, wf, cfg, all=False, add_incl=None, add_excl=None):
                         )
                     )
                 )
-                nii_name = pe.Node(Rename(), name=f"nii_{resource_idx}_" f"{pipe_x}")
+                nii_name = pe.Node(Rename(), name=f"nii_{resource_idx}_{pipe_x}")
                 nii_name.inputs.keep_ext = True

                 if resource in Outputs.ciftis:
@@ -1362,7 +1360,7 @@ def gather_pipes(self, wf, cfg, all=False, add_incl=None, add_excl=None):
                     try:
                         wf.connect(node, out, nii_name, "in_file")
                     except OSError as os_error:
-                        logger.warning(os_error)
+                        WFLOGGER.warning(os_error)
                         continue

                 write_json_imports = ["import os", "import json"]
@@ -1378,7 +1376,7 @@ def gather_pipes(self, wf, cfg, all=False, add_incl=None, add_excl=None):
                 write_json.inputs.json_data =
json_info wf.connect(id_string, "out_filename", write_json, "filename") - ds = pe.Node(DataSink(), name=f"sinker_{resource_idx}_" f"{pipe_x}") + ds = pe.Node(DataSink(), name=f"sinker_{resource_idx}_{pipe_x}") ds.inputs.parameterization = False ds.inputs.base_directory = out_dct["out_dir"] ds.inputs.encrypt_bucket_keys = cfg.pipeline_setup["Amazon-AWS"][ @@ -1406,7 +1404,7 @@ def gather_pipes(self, wf, cfg, all=False, add_incl=None, add_excl=None): outputs_logger.info(expected_outputs) def node_data(self, resource, **kwargs): - """Factory function to create NodeData objects + """Create NodeData objects. Parameters ---------- @@ -1435,13 +1433,14 @@ def __init__(self, node_block_functions, debug=False): self.input_interface = [self.input_interface] if not isinstance(node_block_function, NodeBlockFunction): - # If the object is a plain function `__name__` will be more useful then `str()` + # If the object is a plain function `__name__` will be more useful than `str()` obj_str = ( node_block_function.__name__ if hasattr(node_block_function, "__name__") else str(node_block_function) ) - raise TypeError(f'Object is not a nodeblock: "{obj_str}"') + msg = f'Object is not a nodeblock: "{obj_str}"' + raise TypeError(msg) name = node_block_function.name self.name = name @@ -1475,11 +1474,11 @@ def __init__(self, node_block_functions, debug=False): if node_block_function.outputs is not None: self.options = node_block_function.outputs - logger.info("Connecting %s...", name) + WFLOGGER.info("Connecting %s...", name) if debug: config.update_config({"logging": {"workflow_level": "DEBUG"}}) logging.update_logging(config) - logger.debug( + WFLOGGER.debug( '"inputs": %s\n\t "outputs": %s%s', node_block_function.inputs, list(self.outputs.keys()), @@ -1500,21 +1499,21 @@ def check_null(self, val): def check_output(self, outputs, label, name): if label not in outputs: - raise NameError( + msg = ( f'\n[!] Output name "{label}" in the block ' "function does not match the outputs list " f'{outputs} in Node Block "{name}"\n' ) + raise NameError(msg) def grab_tiered_dct(self, cfg, key_list): cfg_dct = cfg.dict() for key in key_list: try: cfg_dct = cfg_dct.get(key, {}) - except KeyError: - raise Exception( - "[!] The config provided to the node block is not valid" - ) + except KeyError as ke: + msg = "[!] The config provided to the node block is not valid" + raise KeyError(msg) from ke return cfg_dct def connect_block(self, wf, cfg, rpool): @@ -1540,12 +1539,12 @@ def connect_block(self, wf, cfg, rpool): else: for option in option_val: try: - if ( - option in self.grab_tiered_dct(cfg, key_list) - ): # <---- goes over the option_vals in the node block docstring, and checks if the user's pipeline config included it in the forking list + if option in self.grab_tiered_dct(cfg, key_list): + # goes over the option_vals in the node block docstring, and checks if the user's pipeline config included it in the forking list opts.append(option) except AttributeError as err: - raise Exception(f"{err}\nNode Block: {name}") + msg = f"{err}\nNode Block: {name}" + raise Exception(msg) if opts is None: opts = [opts] @@ -1553,7 +1552,7 @@ def connect_block(self, wf, cfg, rpool): elif option_key and not option_val: # enables multiple config forking entries if not isinstance(option_key[0], list): - raise Exception( + msg = ( f"[!] 
The option_key field ({option_key}) " f"for {name} exists but there is no " "option_val.\n\nIf you are trying to " @@ -1561,6 +1560,7 @@ def connect_block(self, wf, cfg, rpool): "option_val field must contain a list of " "a list.\n" ) + raise ValueError(msg) for option_config in option_key: # option_config is a list of pipe config levels down to the option if config: @@ -1570,7 +1570,7 @@ def connect_block(self, wf, cfg, rpool): option_val = option_config[-1] if option_val in self.grab_tiered_dct(cfg, key_list[:-1]): opts.append(option_val) - else: # AND, if there are multiple option-val's (in a list) in the docstring, it gets iterated below in 'for opt in option' etc. AND THAT'S WHEN YOU HAVE TO DELINEATE WITHIN THE NODE BLOCK CODE!!! + else: # AND, if there are multiple option-val's (in a list) in the docstring, it gets iterated below in 'for opt in option' etc. AND THAT'S WHEN YOU HAVE TO DELINEATE WITHIN THE NODE BLOCK CODE!!! opts = [None] all_opts += opts @@ -1586,10 +1586,8 @@ def connect_block(self, wf, cfg, rpool): "output_directory" ]["user_defined"] - for ( - name, - block_dct, - ) in self.node_blocks.items(): # <--- iterates over either the single node block in the sequence, or a list of node blocks within the list of node blocks, i.e. for option forking. + for name, block_dct in self.node_blocks.items(): + # iterates over either the single node block in the sequence, or a list of node blocks within the list of node blocks, i.e. for option forking. switch = self.check_null(block_dct["switch"]) config = self.check_null(block_dct["config"]) option_key = self.check_null(block_dct["option_key"]) @@ -1614,14 +1612,12 @@ def connect_block(self, wf, cfg, rpool): opts = self.grab_tiered_dct(cfg, key_list) else: for option in option_val: - if ( - option in self.grab_tiered_dct(cfg, key_list) - ): # <---- goes over the option_vals in the node block docstring, and checks if the user's pipeline config included it in the forking list + if option in self.grab_tiered_dct(cfg, key_list): + # goes over the option_vals in the node block docstring, and checks if the user's pipeline config included it in the forking list opts.append(option) - else: # AND, if there are multiple option-val's (in a list) in the docstring, it gets iterated below in 'for opt in option' etc. AND THAT'S WHEN YOU HAVE TO DELINEATE WITHIN THE NODE BLOCK CODE!!! - opts = [ - None - ] # THIS ALSO MEANS the multiple option-val's in docstring node blocks can be entered once in the entire node-block sequence, not in a list of multiples + else: # AND, if there are multiple option-val's (in a list) in the docstring, it gets iterated below in 'for opt in option' etc. AND THAT'S WHEN YOU HAVE TO DELINEATE WITHIN THE NODE BLOCK CODE!!! + opts = [None] + # THIS ALSO MEANS the multiple option-val's in docstring node blocks can be entered once in the entire node-block sequence, not in a list of multiples if not opts: # for node blocks where the options are split into different # block functions - opts will be empty for non-selected @@ -1634,14 +1630,14 @@ def connect_block(self, wf, cfg, rpool): if config: try: key_list = config + switch - except TypeError: - raise Exception( + except TypeError as te: + msg = ( "\n\n[!] 
Developer info: Docstring error " f"for {name}, make sure the 'config' or " "'switch' fields are lists.\n\n" ) + raise TypeError(msg) from te switch = self.grab_tiered_dct(cfg, key_list) - elif isinstance(switch[0], list): # we have multiple switches, which is designed to only work if # config is set to "None" @@ -1667,17 +1663,17 @@ def connect_block(self, wf, cfg, rpool): if not isinstance(switch, list): switch = [switch] if True in switch: - for pipe_idx, strat_pool in rpool.get_strats( - inputs, debug - ).items(): # strat_pool is a ResourcePool like {'desc-preproc_T1w': { 'json': info, 'data': (node, out) }, 'desc-brain_mask': etc.} - fork = ( - False in switch - ) # keep in mind rpool.get_strats(inputs) = {pipe_idx1: {'desc-preproc_T1w': etc.}, pipe_idx2: {..} } - for opt in opts: # it's a dictionary of ResourcePools called strat_pools, except those sub-ResourcePools only have one level! no pipe_idx strat keys. + for ( + pipe_idx, + strat_pool, # strat_pool is a ResourcePool like {'desc-preproc_T1w': { 'json': info, 'data': (node, out) }, 'desc-brain_mask': etc.} + ) in rpool.get_strats(inputs, debug).items(): + # keep in mind rpool.get_strats(inputs) = {pipe_idx1: {'desc-preproc_T1w': etc.}, pipe_idx2: {..} } + fork = False in switch + for opt in opts: # it's a dictionary of ResourcePools called strat_pools, except those sub-ResourcePools only have one level! no pipe_idx strat keys. # remember, you can get 'data' or 'json' from strat_pool with member functions # strat_pool has all of the JSON information of all the inputs! # so when we set_data below for the TOP-LEVEL MAIN RPOOL (not the strat_pool), we can generate new merged JSON information for each output. - # particularly, our custom 'CpacProvenance' field. + # particularly, our custom 'CpacProvenance' field. node_name = name pipe_x = rpool.get_pipe_number(pipe_idx) @@ -1694,12 +1690,12 @@ def connect_block(self, wf, cfg, rpool): try: wf, outs = block_function(wf, cfg, strat_pool, pipe_x, opt) except IOError as e: # duplicate node - logger.warning(e) + WFLOGGER.warning(e) continue if not outs: - if block_function.__name__ == "freesurfer_" "postproc": - logger.warning(WARNING_FREESURFER_OFF_WITH_DATA) + if block_function.__name__ == "freesurfer_postproc": + WFLOGGER.warning(WARNING_FREESURFER_OFF_WITH_DATA) LOGTAIL["warnings"].append( WARNING_FREESURFER_OFF_WITH_DATA ) @@ -1711,7 +1707,7 @@ def connect_block(self, wf, cfg, rpool): node_name = f'{node_name}_{opt["Name"]}' if debug: - verbose_logger = getLogger("engine") + verbose_logger = getLogger("CPAC.engine") verbose_logger.debug("\n=======================") verbose_logger.debug("Node name: %s", node_name) prov_dct = rpool.get_resource_strats_from_prov( @@ -1736,7 +1732,7 @@ def connect_block(self, wf, cfg, rpool): new_json_info = copy.deepcopy(strat_pool.get("json")) # transfer over data-specific json info - # for example, if the input data json is _bold and the output is also _bold + # for example, if the input data json is _bold and the output is also _bold data_type = label.split("_")[-1] if data_type in new_json_info["subjson"]: if ( @@ -1845,8 +1841,7 @@ def connect_block(self, wf, cfg, rpool): def wrap_block(node_blocks, interface, wf, cfg, strat_pool, pipe_num, opt): - """Wrap a list of node block functions to make them easier to use within - other node blocks. + """Wrap a list of node block functions to use within other node blocks. 
Example usage: @@ -1914,7 +1909,7 @@ def wrap_block(node_blocks, interface, wf, cfg, strat_pool, pipe_num, opt): def ingress_raw_anat_data(wf, rpool, cfg, data_paths, unique_id, part_id, ses_id): if "anat" not in data_paths: - print("No anatomical data present.") + WFLOGGER.warning("No anatomical data present.") return rpool if "creds_path" not in data_paths: @@ -1923,7 +1918,7 @@ def ingress_raw_anat_data(wf, rpool, cfg, data_paths, unique_id, part_id, ses_id anat_flow = create_anat_datasource(f"anat_T1w_gather_{part_id}_{ses_id}") anat = {} - if type(data_paths["anat"]) is str: + if isinstance(data_paths["anat"], str): anat["T1"] = data_paths["anat"] elif "T1w" in data_paths["anat"]: anat["T1"] = data_paths["anat"]["T1w"] @@ -1961,7 +1956,7 @@ def ingress_freesurfer(wf, rpool, cfg, data_paths, unique_id, part_id, ses_id): try: fs_path = os.path.join(cfg.pipeline_setup["freesurfer_dir"], part_id) except KeyError: - print("No FreeSurfer data present.") + WFLOGGER.warning("No FreeSurfer data present.") return rpool # fs_path = os.path.join(cfg.pipeline_setup['freesurfer_dir'], part_id) @@ -1980,7 +1975,7 @@ def ingress_freesurfer(wf, rpool, cfg, data_paths, unique_id, part_id, ses_id): subj_ses = part_id + "-" + ses_id fs_path = os.path.join(cfg.pipeline_setup["freesurfer_dir"], subj_ses) if not os.path.exists(fs_path): - print(f"No FreeSurfer data found for subject {part_id}") + WFLOGGER.info("No FreeSurfer data found for subject %s", part_id) return rpool # Check for double nested subj names @@ -2043,7 +2038,7 @@ def ingress_freesurfer(wf, rpool, cfg, data_paths, unique_id, part_id, ses_id): ) else: warnings.warn( - str(LookupError("\n[!] Path does not exist for " f"{fullpath}.\n")) + str(LookupError(f"\n[!] Path does not exist for {fullpath}.\n")) ) return rpool @@ -2088,7 +2083,7 @@ def ingress_raw_func_data(wf, rpool, cfg, data_paths, unique_id, part_id, ses_id # pylint: disable=protected-access wf._local_func_scans = local_func_scans if cfg.pipeline_setup["Debugging"]["verbose"]: - verbose_logger = getLogger("engine") + verbose_logger = getLogger("CPAC.engine") verbose_logger.debug("local_func_scans: %s", local_func_scans) del local_func_scans @@ -2100,7 +2095,7 @@ def ingress_output_dir( ): dir_path = data_paths["derivatives_dir"] - print(f"\nPulling outputs from {dir_path}.\n") + WFLOGGER.info("\nPulling outputs from %s.\n", dir_path) anat = os.path.join(dir_path, "anat") func = os.path.join(dir_path, "func") @@ -2143,11 +2138,12 @@ def ingress_output_dir( data_label = filename.split(unique_id)[1].lstrip("_") if len(filename) == len(data_label): - raise Exception( + msg = ( "\n\n[!] Possibly wrong participant or " "session in this directory?\n\n" f"Filepath: {filepath}\n\n" ) + raise Exception(msg) bidstag = "" for tag in data_label.split("_"): @@ -2253,8 +2249,8 @@ def json_outdir_ingress(rpool, filepath, exts, data_label, json): jsonpath = f"{jsonpath}.json" if not os.path.exists(jsonpath): - print( - f"\n\n[!] No JSON found for file {filepath}.\nCreating " f"{jsonpath}..\n\n" + WFLOGGER.info( + "\n\n[!] No JSON found for file %s.\nCreating %s..\n\n", filepath, jsonpath ) json_info = { "Description": "This data was generated elsewhere and " @@ -2283,13 +2279,14 @@ def json_outdir_ingress(rpool, filepath, exts, data_label, json): if only_desc[-1] == "-": only_desc = only_desc.rstrip("-") else: - raise Exception( + msg = ( "\n[!] 
Something went wrong with either " "reading in the output directory or when " "it was written out previously.\n\nGive " "this to your friendly local C-PAC " f"developer:\n\n{data_label!s}\n" ) + raise IOError(msg) # remove the integer at the end of the desc-* variant, we will # get the unique pipe_idx from the CpacProvenance below @@ -2319,7 +2316,6 @@ def func_outdir_ingress( wf, cfg, func_dict, rpool, unique_id, creds_path, part_id, key, func_paths ): pipe_x = len(rpool.pipe_list) - exts = [".nii", ".gz", ".mat", ".1D", ".txt", ".csv", ".rms", ".tsv"] ingress = create_func_datasource( func_dict, rpool, f"gather_func_outdir_{key}_{pipe_x}" ) @@ -2362,7 +2358,7 @@ def func_outdir_ingress( wf.connect(ingress, "outputspec.scan", iterables, "scan") for key in func_paths: - if key == mask_paths_key or key == ts_paths_key: + if key in (mask_paths_key, ts_paths_key): ingress_func = create_general_datasource(f"ingress_func_data_{key}") ingress_func.inputs.inputnode.set( unique_id=unique_id, @@ -2412,7 +2408,7 @@ def strip_template(data_label, dir_path, filename): return data_label, json -def ingress_pipeconfig_paths(cfg, rpool, unique_id, creds_path=None): +def ingress_pipeconfig_paths(wf, cfg, rpool, unique_id, creds_path=None): # ingress config file paths # TODO: may want to change the resource keys for each to include one level up in the YAML as well @@ -2421,6 +2417,7 @@ def ingress_pipeconfig_paths(cfg, rpool, unique_id, creds_path=None): template_csv = p.resource_filename("CPAC", "resources/cpac_templates.csv") template_df = pd.read_csv(template_csv, keep_default_na=False) + desired_orientation = cfg.pipeline_setup["desired_orientation"] for row in template_df.itertuples(): key = row.Key @@ -2477,7 +2474,13 @@ def ingress_pipeconfig_paths(cfg, rpool, unique_id, creds_path=None): resampled_template = pe.Node( Function( - input_names=["resolution", "template", "template_name", "tag"], + input_names=[ + "orientation", + "resolution", + "template", + "template_name", + "tag", + ], output_names=["resampled_template"], function=resolve_resolution, as_module=True, @@ -2485,24 +2488,15 @@ def ingress_pipeconfig_paths(cfg, rpool, unique_id, creds_path=None): name="resampled_" + key, ) + resampled_template.inputs.orientation = desired_orientation resampled_template.inputs.resolution = resolution resampled_template.inputs.template = val resampled_template.inputs.template_name = key resampled_template.inputs.tag = tag - # the set_data below is set up a little differently, because we are - # injecting and also over-writing already-existing entries - # other alternative would have been to ingress into the - # resampled_template node from the already existing entries, but we - # didn't do that here - rpool.set_data( - key, - resampled_template, - "resampled_template", - json_info, - "", - "template_resample", - ) # , inject=True) # pipe_idx (after the blank json {}) should be the previous strat that you want deleted! 
because you're not connecting this the regular way, you have to do it manually
+            node = resampled_template
+            output = "resampled_template"
+            node_name = "template_resample"

         elif val:
             config_ingress = create_general_datasource(f"gather_{key}")
@@ -2512,14 +2506,33 @@ def ingress_pipeconfig_paths(cfg, rpool, unique_id, creds_path=None):
                 creds_path=creds_path,
                 dl_dir=cfg.pipeline_setup["working_directory"]["path"],
             )
-            rpool.set_data(
-                key,
-                config_ingress,
-                "outputspec.data",
-                json_info,
-                "",
-                f"{key}_config_ingress",
-            )
+            node = config_ingress
+            output = "outputspec.data"
+            node_name = f"{key}_config_ingress"
+
+        if val.endswith((".nii", ".nii.gz")):
+            check_reorient = pe.Node(
+                interface=afni.Resample(),
+                name=f"reorient_{key}",
+            )
+
+            check_reorient.inputs.orientation = desired_orientation
+            check_reorient.inputs.outputtype = "NIFTI_GZ"
+
+            wf.connect(node, output, check_reorient, "in_file")
+            node = check_reorient
+            output = "out_file"
+            node_name = f"{key}_reorient"
+
+        rpool.set_data(
+            key,
+            node,
+            output,
+            json_info,
+            "",
+            node_name,
+        )
+
     # templates, resampling from config
     """
     template_keys = [
@@ -2605,12 +2618,12 @@ def _set_nested(attr, keys):
         )
         cfg.set_nested(cfg, key, node)
     """
-
-    return rpool
+    return wf, rpool


 def initiate_rpool(wf, cfg, data_paths=None, part_id=None):
     """
+    Initialize a new ResourcePool.
+
     data_paths format:
       {'anat': {
@@ -2676,7 +2689,7 @@ def initiate_rpool(wf, cfg, data_paths=None, part_id=None):
     )

     # grab any file paths from the pipeline config YAML
-    rpool = ingress_pipeconfig_paths(cfg, rpool, unique_id, creds_path)
+    wf, rpool = ingress_pipeconfig_paths(wf, cfg, rpool, unique_id, creds_path)

     # output files with 4 different scans

@@ -2709,11 +2722,11 @@ def run_node_blocks(blocks, data_paths, cfg=None):
     run_blocks = []
     if rpool.check_rpool("desc-preproc_T1w"):
-        print("Preprocessed T1w found, skipping anatomical preprocessing.")
+        WFLOGGER.info("Preprocessed T1w found, skipping anatomical preprocessing.")
     else:
         run_blocks += blocks[0]
     if rpool.check_rpool("desc-preproc_bold"):
-        print("Preprocessed BOLD found, skipping functional preprocessing.")
+        WFLOGGER.info("Preprocessed BOLD found, skipping functional preprocessing.")
     else:
         run_blocks += blocks[1]
@@ -2727,16 +2740,15 @@ def run_node_blocks(blocks, data_paths, cfg=None):


 class NodeData:
-    r"""Class to hold outputs of
-    CPAC.pipeline.engine.ResourcePool().get_data(), so one can do
+    r"""Attribute access for ResourcePool.get_data outputs.

-    ``node_data = strat_pool.node_data(resource)`` and have
-    ``node_data.node`` and ``node_data.out`` instead of doing
-    ``node, out = strat_pool.get_data(resource)`` and needing two
-    variables (``node`` and ``out``) to store that information.
+    Class to hold outputs of CPAC.pipeline.engine.ResourcePool().get_data(), so one can
+    do ``node_data = strat_pool.node_data(resource)`` and have ``node_data.node`` and
+    ``node_data.out`` instead of doing ``node, out = strat_pool.get_data(resource)``
+    and needing two variables (``node`` and ``out``) to store that information.
+
+    Also includes ``variant`` attribute providing the resource's self-keyed value
+    within its ``CpacVariant`` dictionary.
Examples -------- @@ -2766,5 +2778,5 @@ def __init__(self, strat_pool=None, resource=None, **kwargs): if strat_pool is not None and resource is not None: self.node, self.out = strat_pool.get_data(resource, **kwargs) - def __repr__(self): + def __repr__(self): # noqa: D105 return f'{getattr(self.node, "name", str(self.node))} ({self.out})' diff --git a/CPAC/pipeline/schema.py b/CPAC/pipeline/schema.py index eb83a9107f..fa36a0dd2e 100644 --- a/CPAC/pipeline/schema.py +++ b/CPAC/pipeline/schema.py @@ -15,7 +15,7 @@ # You should have received a copy of the GNU Lesser General Public # License along with C-PAC. If not, see . -"""Validation schema for C-PAC pipeline configurations""" +"""Validation schema for C-PAC pipeline configurations.""" # pylint: disable=too-many-lines from itertools import chain, permutations @@ -58,13 +58,13 @@ # ('x', # 1 or more digits, optional decimal, 0 or more lowercase characters (units) # ) 0 or more times -RESOLUTION_REGEX = r"^[0-9]+(\.[0-9]*){0,1}[a-z]*" r"(x[0-9]+(\.[0-9]*){0,1}[a-z]*)*$" +RESOLUTION_REGEX = r"^[0-9]+(\.[0-9]*){0,1}[a-z]*(x[0-9]+(\.[0-9]*){0,1}[a-z]*)*$" Number = Any(float, int, All(str, Match(SCIENTIFIC_NOTATION_STR_REGEX))) def str_to_bool1_1(x): # pylint: disable=invalid-name - """Convert strings to Booleans for YAML1.1 syntax + """Convert strings to Booleans for YAML1.1 syntax. Ref https://yaml.org/type/bool.html @@ -91,11 +91,12 @@ def str_to_bool1_1(x): # pylint: disable=invalid-name else x ) if not isinstance(x, (bool, int)): - raise BooleanInvalid( + msg = ( 'Type boolean value was expected, type ' f'{getattr(type(x), "__name__", str(type(x)))} ' f'value\n\n{x}\n\nwas provided' ) + raise BooleanInvalid(msg) return bool(x) @@ -316,7 +317,7 @@ def str_to_bool1_1(x): # pylint: disable=invalid-name def name_motion_filter(mfilter, mfilters=None): - """Given a motion filter, create a short string for the filename + """Given a motion filter, create a short string for the filename. Parameters ---------- @@ -385,8 +386,7 @@ def name_motion_filter(mfilter, mfilters=None): def permutation_message(key, options): - """Function to give a clean, human-readable error message for keys - that accept permutation values + """Give a human-readable error message for keys that accept permutation values. Parameters ---------- @@ -413,7 +413,7 @@ def permutation_message(key, options): def sanitize(filename): - """Sanitize a filename and replace whitespaces with underscores""" + """Sanitize a filename and replace whitespaces with underscores.""" return re.sub(r"\s+", "_", sanitize_filename(filename)) @@ -423,6 +423,9 @@ def sanitize(filename): "skip env check": Maybe(bool), # flag for skipping an environment check "pipeline_setup": { "pipeline_name": All(str, Length(min=1), sanitize), + "desired_orientation": In( + {"RPI", "LPI", "RAI", "LAI", "RAS", "LAS", "RPS", "LPS"} + ), "output_directory": { "path": str, "source_outputs_dir": Maybe(str), @@ -1254,10 +1257,11 @@ def sanitize(filename): def schema(config_dict): - """Validate a pipeline configuration against the latest validation schema - by first applying backwards-compatibility patches, then applying - Voluptuous validation, then handling complex configuration interaction - checks before returning validated config_dict. + """Validate a participant-analysis pipeline configuration. 
+ + Validate against the latest validation schema by first applying backwards- + compatibility patches, then applying Voluptuous validation, then handling complex + configuration interaction checks before returning validated config_dict. Parameters ---------- @@ -1277,9 +1281,12 @@ def schema(config_dict): "2-nuisance_regression", "space", ] and isinstance(multiple_invalid.errors[0], CoerceInvalid): - raise CoerceInvalid( + msg = ( 'Nusiance regression space is not forkable. Please choose ' - f'only one of {valid_options["space"]}', + f'only one of {valid_options["space"]}' + ) + raise CoerceInvalid( + msg, path=multiple_invalid.path, ) from multiple_invalid raise multiple_invalid @@ -1306,24 +1313,26 @@ def schema(config_dict): ]["space"] != "template" ): - raise ExclusiveInvalid( + msg = ( "``single_step_resampling_from_stc`` requires " "template-space nuisance regression. Either set " "``nuisance_corrections: 2-nuisance_regression: space`` " f"to ``template`` {or_else}" ) + raise ExclusiveInvalid(msg) if any( registration != "ANTS" for registration in partially_validated["registration_workflows"][ "anatomical_registration" ]["registration"]["using"] ): - raise ExclusiveInvalid( + msg = ( "``single_step_resampling_from_stc`` requires " "ANTS registration. Either set " "``registration_workflows: anatomical_registration: " f"registration: using`` to ``ANTS`` {or_else}" ) + raise ExclusiveInvalid(msg) except KeyError: pass try: @@ -1351,12 +1360,15 @@ def schema(config_dict): Length(min=1, max=1)(mec["motion_correction"]["using"]) except LengthInvalid: mec_path = ["functional_preproc", "motion_estimates_and_correction"] - raise LengthInvalid( # pylint: disable=raise-missing-from + msg = ( f'If data[{"][".join(map(repr, mec_path))}][\'run\'] is ' # length must be between 1 and # len(valid_options['motion_correction']) once #1935 is # resolved - 'True, length of list must be exactly 1', + 'True, length of list must be exactly 1' + ) + raise LengthInvalid( # pylint: disable=raise-missing-from + msg, path=[*mec_path, "motion_correction", "using"], ) except KeyError: @@ -1371,10 +1383,11 @@ def schema(config_dict): "create_regressors" ] ): - raise ExclusiveInvalid( + msg = ( "[!] Ingress_regressors and create_regressors can't both run! " " Try turning one option off.\n " ) + raise ExclusiveInvalid(msg) except KeyError: pass try: @@ -1396,12 +1409,13 @@ def schema(config_dict): ) as error: import site - raise OSError( + msg = ( "U-Net brain extraction requires torch to be installed, " "but the installation path in this container is " "read-only. Please bind a local writable path to " f'"{site.USER_BASE}" in the container to use U-Net.' 
- ) from error + ) + raise OSError(msg) from error except KeyError: pass return partially_validated diff --git a/CPAC/pipeline/test/test_engine.py b/CPAC/pipeline/test/test_engine.py index c228fc3640..cf85f50dbe 100644 --- a/CPAC/pipeline/test/test_engine.py +++ b/CPAC/pipeline/test/test_engine.py @@ -90,7 +90,7 @@ def test_ingress_pipeconfig_data(pipe_config, bids_dir, test_dir): rpool = ResourcePool(name=unique_id, cfg=cfg) - rpool = ingress_pipeconfig_paths(cfg, rpool, sub_data_dct, unique_id) + wf, rpool = ingress_pipeconfig_paths(wf, cfg, rpool, sub_data_dct, unique_id) rpool.gather_pipes(wf, cfg, all=True) diff --git a/CPAC/registration/registration.py b/CPAC/registration/registration.py index 7410b335f2..33af078797 100644 --- a/CPAC/registration/registration.py +++ b/CPAC/registration/registration.py @@ -1,4 +1,4 @@ -# Copyright (C) 2012-2023 C-PAC Developers +# Copyright (C) 2012-2024 C-PAC Developers # This file is part of C-PAC. @@ -15,8 +15,11 @@ # You should have received a copy of the GNU Lesser General Public # License along with C-PAC. If not, see . # pylint: disable=too-many-lines,ungrouped-imports,wrong-import-order +"""Workflows for registration.""" + from typing import Optional +from voluptuous import RequiredFieldInvalid from nipype.interfaces import afni, ants, c3, fsl, utility as util from nipype.interfaces.afni import utils as afni_utils @@ -36,8 +39,8 @@ seperate_warps_list, single_ants_xfm_to_list, ) +from CPAC.utils.interfaces import Function from CPAC.utils.interfaces.fsl import Merge as fslMerge -from CPAC.utils.typing import LIST_OR_STR, TUPLE from CPAC.utils.utils import check_prov_for_motion_tool, check_prov_for_regtool @@ -49,12 +52,13 @@ def apply_transform( num_cpus=1, num_ants_cores=1, ): + """Apply transform.""" if not reg_tool: - raise Exception( - "\n[!] Developer info: the 'reg_tool' parameter sent " - f"to the 'apply_transform' node for '{wf_name}' is " - f"empty.\n" + msg = ( + "\n[!] Developer info: the 'reg_tool' parameter sent to the" + f" 'apply_transform' node for '{wf_name}' is empty.\n" ) + raise RequiredFieldInvalid(msg) wf = pe.Workflow(name=wf_name) @@ -101,7 +105,7 @@ def apply_transform( wf.connect(inputNode, "reference", apply_warp, "reference_image") interp_string = pe.Node( - util.Function( + Function( input_names=["interpolation", "reg_tool"], output_names=["interpolation"], function=interpolation_string, @@ -115,7 +119,7 @@ def apply_transform( wf.connect(interp_string, "interpolation", apply_warp, "interpolation") ants_xfm_list = pe.Node( - util.Function( + Function( input_names=["transform"], output_names=["transform_list"], function=single_ants_xfm_to_list, @@ -130,9 +134,9 @@ def apply_transform( # parallelize the apply warp, if multiple CPUs, and it's a time # series! 
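+        # e.g., num_cpus=4 on a 200-volume series gives four ~50-TR chunks
+        # (the exact ranges come from ``chunk_ts`` below), which are warped
+        # concurrently and then re-merged; the numbers here are illustrative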
if int(num_cpus) > 1 and time_series: - chunk_imports = ["import nibabel as nb"] + chunk_imports = ["import nibabel as nib"] chunk = pe.Node( - util.Function( + Function( input_names=["func_file", "n_chunks", "chunk_size"], output_names=["TR_ranges"], function=chunk_ts, @@ -151,7 +155,7 @@ def apply_transform( split_imports = ["import os", "import subprocess"] split = pe.Node( - util.Function( + Function( input_names=["func_file", "tr_ranges"], output_names=["split_funcs"], function=split_ts_chunks, @@ -193,7 +197,7 @@ def apply_transform( ) interp_string = pe.Node( - util.Function( + Function( input_names=["interpolation", "reg_tool"], output_names=["interpolation"], function=interpolation_string, @@ -217,9 +221,9 @@ def apply_transform( # parallelize the apply warp, if multiple CPUs, and it's a time # series! if int(num_cpus) > 1 and time_series: - chunk_imports = ["import nibabel as nb"] + chunk_imports = ["import nibabel as nib"] chunk = pe.Node( - util.Function( + Function( input_names=["func_file", "n_chunks", "chunk_size"], output_names=["TR_ranges"], function=chunk_ts, @@ -238,7 +242,7 @@ def apply_transform( split_imports = ["import os", "import subprocess"] split = pe.Node( - util.Function( + Function( input_names=["func_file", "tr_ranges"], output_names=["split_funcs"], function=split_ts_chunks, @@ -324,7 +328,7 @@ def transform_derivative( def convert_pedir(pedir, convert="xyz_to_int"): - """FSL Flirt requires pedir input encoded as an int""" + """FSL Flirt requires pedir input encoded as an int.""" if convert == "xyz_to_int": conv_dct = { "x": 1, @@ -352,18 +356,16 @@ def convert_pedir(pedir, convert="xyz_to_int"): if isinstance(pedir, bytes): pedir = pedir.decode() if not isinstance(pedir, str): - raise Exception( - "\n\nPhase-encoding direction must be a " - f"string value.\n\nValue: {pedir}" - "\n\n" - ) + msg = f"\n\nPhase-encoding direction must be a string value.\n\nValue: {pedir}\n\n" + raise ValueError(msg) if pedir not in conv_dct.keys(): - raise Exception("\n\nInvalid phase-encoding direction " f"entered: {pedir}\n\n") - pedir = conv_dct[pedir] - return pedir + msg = f"\n\nInvalid phase-encoding direction entered: {pedir}\n\n" + raise ValueError(msg) + return conv_dct[pedir] def create_fsl_flirt_linear_reg(name="fsl_flirt_linear_reg"): + """Create a FLIRT workflow.""" linear_register = pe.Workflow(name=name) inputspec = pe.Node( @@ -404,9 +406,7 @@ def create_fsl_flirt_linear_reg(name="fsl_flirt_linear_reg"): def create_fsl_fnirt_nonlinear_reg(name="fsl_fnirt_nonlinear_reg"): - """ - Performs non-linear registration of an input file to a reference file - using FSL FNIRT. + """Perform non-linear registration of an input to a reference using FSL FNIRT. Parameters ---------- @@ -515,9 +515,7 @@ def create_fsl_fnirt_nonlinear_reg(name="fsl_fnirt_nonlinear_reg"): def create_fsl_fnirt_nonlinear_reg_nhp(name="fsl_fnirt_nonlinear_reg_nhp"): - """ - Performs non-linear registration of an input file to a reference file - using FSL FNIRT. + """Perform non-linear registration of an input to a reference using FSL FNIRT. Parameters ---------- @@ -680,9 +678,9 @@ def create_fsl_fnirt_nonlinear_reg_nhp(name="fsl_fnirt_nonlinear_reg_nhp"): def create_register_func_to_anat( config, phase_diff_distcor=False, name="register_func_to_anat" ): - """ - Registers a functional scan in native space to anatomical space using a - linear transform and does not include bbregister. + """Register a functional scan in native space to anatomical space... 
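Both the ANTs and FSL branches above parallelize the apply-warp step over temporal chunks via `chunk_ts` and `split_ts_chunks`, which are imported from C-PAC utilities rather than defined in this file. An approximation of the chunking arithmetic, assuming the real helper returns inclusive volume-index ranges:

```python
import nibabel as nib

def chunk_ts_sketch(func_file: str, n_chunks: int) -> list[tuple[int, int]]:
    """Split a 4D NIfTI into inclusive (first, last) volume-index ranges."""
    n_vols = nib.load(func_file).shape[3]
    size = -(-n_vols // n_chunks)  # ceiling division
    return [
        (start, min(start + size - 1, n_vols - 1))
        for start in range(0, n_vols, size)
    ]
```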
+ + ...using a linear transform and does not include bbregister. Parameters ---------- @@ -764,7 +762,7 @@ def create_register_func_to_anat( if phase_diff_distcor: conv_pedir = pe.Node( - interface=util.Function( + interface=Function( input_names=["pedir", "convert"], output_names=["pedir"], function=convert_pedir, @@ -803,12 +801,9 @@ def create_register_func_to_anat( def create_register_func_to_anat_use_T2(config, name="register_func_to_anat_use_T2"): - # for monkey data - # ref: https://github.com/DCAN-Labs/dcan-macaque-pipeline/blob/master/fMRIVolume/GenericfMRIVolumeProcessingPipeline.sh#L287-L295 - # https://github.com/HechengJin0/dcan-macaque-pipeline/blob/master/fMRIVolume/GenericfMRIVolumeProcessingPipeline.sh#L524-L535 - """ - Registers a functional scan in native space to anatomical space using a - linear transform and does not include bbregister, use T1 and T2 image. + """Register a functional scan in native space to anatomical space... + + ...using a linear transform and does not include bbregister, use T1 and T2 image. Parameters ---------- @@ -823,6 +818,10 @@ def create_register_func_to_anat_use_T2(config, name="register_func_to_anat_use_ Notes ----- + for monkey data + ref: https://github.com/DCAN-Labs/dcan-macaque-pipeline/blob/90e7e3f/fMRIVolume/GenericfMRIVolumeProcessingPipeline.sh#L287-L295 + https://github.com/HechengJin0/dcan-macaque-pipeline/blob/9f69302/fMRIVolume/GenericfMRIVolumeProcessingPipeline.sh#L524-L535 + Workflow Inputs:: inputspec.func : string (nifti file) @@ -961,9 +960,9 @@ def create_register_func_to_anat_use_T2(config, name="register_func_to_anat_use_ def create_bbregister_func_to_anat( phase_diff_distcor=False, name="bbregister_func_to_anat" ): - """ - Registers a functional scan in native space to structural. This is - meant to be used after create_nonlinear_register() has been run and + """Register a functional scan in native space to structural. + + This is meant to be used after create_nonlinear_register() has been run and relies on some of its outputs. Parameters @@ -1069,7 +1068,7 @@ def bbreg_args(bbreg_target): if phase_diff_distcor: conv_pedir = pe.Node( - interface=util.Function( + interface=Function( input_names=["pedir", "convert"], output_names=["pedir"], function=convert_pedir, @@ -1108,9 +1107,9 @@ def bbreg_args(bbreg_target): def create_wf_calculate_ants_warp( name="create_wf_calculate_ants_warp", num_threads=1, reg_ants_skull=1 ): - """ - Calculates the nonlinear ANTS registration transform. This workflow - employs the antsRegistration tool: + """Calculate the nonlinear ANTS registration transform. 
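`create_bbregister_func_to_anat` above configures FLIRT for boundary-based registration via a schedule file. A hedged sketch of the key node settings; the `$FSLDIR` path is the conventional location of FSL's BBR schedule and is an assumption here:

```python
import os

import nipype.pipeline.engine as pe
from nipype.interfaces import fsl

bbreg = pe.Node(interface=fsl.FLIRT(), name="bbreg_func_to_anat")
bbreg.inputs.cost = "bbr"
bbreg.inputs.dof = 6
bbreg.inputs.schedule = os.path.join(
    os.environ["FSLDIR"], "etc", "flirtsch", "bbr.sch"
)
# in_file, reference, and wm_seg (the white-matter boundary) arrive
# through wf.connect in the real workflow.
```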
+ + This workflow employs the antsRegistration tool: http://stnava.github.io/ANTs/ @@ -1278,7 +1277,7 @@ def create_wf_calculate_ants_warp( """ reg_imports = ["import os", "import subprocess"] calculate_ants_warp = pe.Node( - interface=util.Function( + interface=Function( input_names=[ "moving_brain", "reference_brain", @@ -1304,7 +1303,7 @@ def create_wf_calculate_ants_warp( calculate_ants_warp.interface.num_threads = num_threads select_forward_initial = pe.Node( - util.Function( + Function( input_names=["warp_list", "selection"], output_names=["selected_warp"], function=seperate_warps_list, @@ -1315,7 +1314,7 @@ def create_wf_calculate_ants_warp( select_forward_initial.inputs.selection = "Initial" select_forward_rigid = pe.Node( - util.Function( + Function( input_names=["warp_list", "selection"], output_names=["selected_warp"], function=seperate_warps_list, @@ -1326,7 +1325,7 @@ def create_wf_calculate_ants_warp( select_forward_rigid.inputs.selection = "Rigid" select_forward_affine = pe.Node( - util.Function( + Function( input_names=["warp_list", "selection"], output_names=["selected_warp"], function=seperate_warps_list, @@ -1337,7 +1336,7 @@ def create_wf_calculate_ants_warp( select_forward_affine.inputs.selection = "Affine" select_forward_warp = pe.Node( - util.Function( + Function( input_names=["warp_list", "selection"], output_names=["selected_warp"], function=seperate_warps_list, @@ -1348,7 +1347,7 @@ def create_wf_calculate_ants_warp( select_forward_warp.inputs.selection = "Warp" select_inverse_warp = pe.Node( - util.Function( + Function( input_names=["warp_list", "selection"], output_names=["selected_warp"], function=seperate_warps_list, @@ -1456,6 +1455,7 @@ def create_wf_calculate_ants_warp( def FSL_registration_connector( wf_name, cfg, orig="T1w", opt=None, symmetric=False, template="T1w" ): + """Transform raw data to template with FSL.""" wf = pe.Workflow(name=wf_name) inputNode = pe.Node( @@ -1485,7 +1485,7 @@ def FSL_registration_connector( if template == "EPI": tmpl = "EPI" - if opt == "FSL" or opt == "FSL-linear": + if opt in ("FSL", "FSL-linear"): flirt_reg_anat_mni = create_fsl_flirt_linear_reg( f"anat_mni_flirt_register{symm}" ) @@ -1518,7 +1518,7 @@ def FSL_registration_connector( ) write_invlin_composite_xfm = pe.Node( - interface=fsl.ConvertWarp(), name=f"fsl_invlin-warp_to_" f"nii{symm}" + interface=fsl.ConvertWarp(), name=f"fsl_invlin-warp_to_nii{symm}" ) wf.connect( @@ -1625,6 +1625,7 @@ def FSL_registration_connector( def ANTs_registration_connector( wf_name, cfg, params, orig="T1w", symmetric=False, template="T1w" ): + """Transform raw data to template with ANTs.""" wf = pe.Workflow(name=wf_name) inputNode = pe.Node( @@ -1655,12 +1656,11 @@ def ANTs_registration_connector( if params is None: err_msg = ( - "\n\n[!] C-PAC says: \nYou have selected ANTs as your " - "anatomical registration method.\n" - "However, no ANTs parameters were specified.\n" - "Please specify ANTs parameters properly and try again." + "\n\n[!] C-PAC says: \nYou have selected ANTs as your" + " anatomical registration method.\nHowever, no ANTs parameters were" + " specified.\nPlease specify ANTs parameters properly and try again." 
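The `select_forward_*` nodes above all wrap `seperate_warps_list` [sic], which plucks one stage ("Initial", "Rigid", "Affine", "Warp") out of antsRegistration's forward-transform list. An approximation of that helper, assuming stage names are embedded in the output filenames:

```python
def seperate_warps_list_sketch(warp_list, selection):
    """Return the transform whose filename matches the requested stage."""
    for warp in warp_list:
        if selection in warp:
            return warp
    return None
```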
) - raise Exception(err_msg) + raise RequiredFieldInvalid(err_msg) ants_reg_anat_mni = create_wf_calculate_ants_warp( f"anat_mni_ants_register{symm}", @@ -1703,7 +1703,7 @@ def ANTs_registration_connector( "ANTs" ]["use_lesion_mask"]: # Create lesion preproc node to apply afni Refit and Resample - lesion_preproc = create_lesion_preproc(wf_name=f"lesion_preproc{symm}") + lesion_preproc = create_lesion_preproc(cfg, wf_name=f"lesion_preproc{symm}") wf.connect(inputNode, "lesion_mask", lesion_preproc, "inputspec.lesion") wf.connect( lesion_preproc, @@ -1756,7 +1756,7 @@ def ANTs_registration_connector( # check transform list to exclude Nonetype (missing) init/rig/affine check_transform = pe.Node( - util.Function( + Function( input_names=["transform_list"], output_names=["checked_transform_list", "list_length"], function=check_transforms, @@ -1802,7 +1802,7 @@ def ANTs_registration_connector( write_composite_invlinear_xfm.inputs.dimension = 3 collect_inv_transforms = pe.Node( - util.Merge(3), name="collect_inv_transforms" f"{symm}" + util.Merge(3), name=f"collect_inv_transforms{symm}" ) wf.connect( @@ -1819,7 +1819,7 @@ def ANTs_registration_connector( # check transform list to exclude Nonetype (missing) init/rig/affine check_invlinear_transform = pe.Node( - util.Function( + Function( input_names=["transform_list"], output_names=["checked_transform_list", "list_length"], function=check_transforms, @@ -1841,7 +1841,7 @@ def ANTs_registration_connector( # generate inverse transform flags, which depends on the # number of transforms inverse_transform_flags = pe.Node( - util.Function( + Function( input_names=["transform_list"], output_names=["inverse_transform_flags"], function=generate_inverse_transform_flags, @@ -1882,7 +1882,7 @@ def ANTs_registration_connector( write_composite_xfm.inputs.dimension = 3 collect_all_transforms = pe.Node( - util.Merge(4), name=f"collect_all_transforms" f"{symm}" + util.Merge(4), name=f"collect_all_transforms{symm}" ) wf.connect( @@ -1903,7 +1903,7 @@ def ANTs_registration_connector( # check transform list to exclude Nonetype (missing) init/rig/affine check_all_transform = pe.Node( - util.Function( + Function( input_names=["transform_list"], output_names=["checked_transform_list", "list_length"], function=check_transforms, @@ -1939,7 +1939,7 @@ def ANTs_registration_connector( write_composite_inv_xfm.inputs.dimension = 3 collect_all_inv_transforms = pe.Node( - util.Merge(4), name=f"collect_all_inv_transforms" f"{symm}" + util.Merge(4), name=f"collect_all_inv_transforms{symm}" ) wf.connect( @@ -1972,7 +1972,7 @@ def ANTs_registration_connector( # check transform list to exclude Nonetype (missing) init/rig/affine check_all_inv_transform = pe.Node( - util.Function( + Function( input_names=["transform_list"], output_names=["checked_transform_list", "list_length"], function=check_transforms, @@ -1994,7 +1994,7 @@ def ANTs_registration_connector( # generate inverse transform flags, which depends on the # number of transforms inverse_all_transform_flags = pe.Node( - util.Function( + Function( input_names=["transform_list"], output_names=["inverse_transform_flags"], function=generate_inverse_transform_flags, @@ -2053,6 +2053,7 @@ def ANTs_registration_connector( def bold_to_T1template_xfm_connector( wf_name, cfg, reg_tool, symmetric=False, blip=False ): + """Transform functional to T1w template.""" wf = pe.Workflow(name=wf_name) inputNode = pe.Node( @@ -2089,7 +2090,7 @@ def bold_to_T1template_xfm_connector( itk_imports = ["import os"] change_transform = pe.Node( - 
util.Function( + Function( input_names=["input_affine_file"], output_names=["updated_affine_file"], function=change_itk_transform_type, @@ -2253,8 +2254,9 @@ def bold_to_T1template_xfm_connector( }, ) def register_FSL_anat_to_template(wf, cfg, strat_pool, pipe_num, opt=None): + """Register T1w to template with FSL.""" fsl, outputs = FSL_registration_connector( - f"register_{opt}_anat_to_" f"template_{pipe_num}", cfg, orig="T1w", opt=opt + f"register_{opt}_anat_to_template_{pipe_num}", cfg, orig="T1w", opt=opt ) fsl.inputs.inputspec.interpolation = cfg.registration_workflows[ @@ -2339,8 +2341,9 @@ def register_FSL_anat_to_template(wf, cfg, strat_pool, pipe_num, opt=None): }, ) def register_symmetric_FSL_anat_to_template(wf, cfg, strat_pool, pipe_num, opt=None): + """Register T1w to symmetric template with FSL.""" fsl, outputs = FSL_registration_connector( - f"register_{opt}_anat_to_" f"template_symmetric_" f"{pipe_num}", + f"register_{opt}_anat_to_template_symmetric_{pipe_num}", cfg, orig="T1w", opt=opt, @@ -2412,11 +2415,9 @@ def register_symmetric_FSL_anat_to_template(wf, cfg, strat_pool, pipe_num, opt=N }, ) def register_FSL_EPI_to_template(wf, cfg, strat_pool, pipe_num, opt=None): - """Directly register the mean functional to an EPI template. No T1w - involved. - """ + """Directly register the mean functional to an EPI template. No T1w involved.""" fsl, outputs = FSL_registration_connector( - f"register_{opt}_EPI_to_" f"template_{pipe_num}", + f"register_{opt}_EPI_to_template_{pipe_num}", cfg, orig="bold", opt=opt, @@ -2551,12 +2552,13 @@ def register_FSL_EPI_to_template(wf, cfg, strat_pool, pipe_num, opt=None): }, ) def register_ANTs_anat_to_template(wf, cfg, strat_pool, pipe_num, opt=None): + """Register T1w to template with ANTs.""" params = cfg.registration_workflows["anatomical_registration"]["registration"][ "ANTs" ]["T1_registration"] ants_rc, outputs = ANTs_registration_connector( - "ANTS_T1_to_template_" f"{pipe_num}", cfg, params, orig="T1w" + f"ANTS_T1_to_template_{pipe_num}", cfg, params, orig="T1w" ) ants_rc.inputs.inputspec.interpolation = cfg.registration_workflows[ @@ -2617,6 +2619,7 @@ def register_ANTs_anat_to_template(wf, cfg, strat_pool, pipe_num, opt=None): ) outputs[new_key] = outputs[key] del outputs[key] + return (wf, outputs) @@ -2684,12 +2687,13 @@ def register_ANTs_anat_to_template(wf, cfg, strat_pool, pipe_num, opt=None): }, ) def register_symmetric_ANTs_anat_to_template(wf, cfg, strat_pool, pipe_num, opt=None): + """Register T1 to symmetric template with ANTs.""" params = cfg.registration_workflows["anatomical_registration"]["registration"][ "ANTs" ]["T1_registration"] ants, outputs = ANTs_registration_connector( - "ANTS_T1_to_template_" f"symmetric_{pipe_num}", + f"ANTS_T1_to_template_symmetric_{pipe_num}", cfg, params, orig="T1w", @@ -2773,15 +2777,13 @@ def register_symmetric_ANTs_anat_to_template(wf, cfg, strat_pool, pipe_num, opt= }, ) def register_ANTs_EPI_to_template(wf, cfg, strat_pool, pipe_num, opt=None): - """Directly register the mean functional to an EPI template. No T1w - involved. - """ + """Directly register the mean functional to an EPI template. 
No T1w involved.""" params = cfg.registration_workflows["functional_registration"]["EPI_registration"][ "ANTs" ]["parameters"] ants, outputs = ANTs_registration_connector( - "ANTS_bold_to_EPI-template" f"_{pipe_num}", + f"ANTS_bold_to_EPI-template_{pipe_num}", cfg, params, orig="bold", @@ -2855,6 +2857,7 @@ def register_ANTs_EPI_to_template(wf, cfg, strat_pool, pipe_num, opt=None): }, ) def overwrite_transform_anat_to_template(wf, cfg, strat_pool, pipe_num, opt=None): + """Overwrite ANTs transforms with FSL transforms.""" xfm_prov = strat_pool.get_cpac_provenance("from-T1w_to-template_mode-image_xfm") reg_tool = check_prov_for_regtool(xfm_prov) @@ -2862,7 +2865,7 @@ def overwrite_transform_anat_to_template(wf, cfg, strat_pool, pipe_num, opt=None if opt.lower() == "fsl" and reg_tool.lower() == "ants": # Apply head-to-head transforms on brain using ABCD-style registration # Convert ANTs warps to FSL warps to be consistent with the functional registration - # Ref: https://github.com/DCAN-Labs/DCAN-HCP/blob/master/PostFreeSurfer/scripts/AtlasRegistrationToMNI152_ANTsbased.sh#L134-L172 + # Ref: https://github.com/DCAN-Labs/DCAN-HCP/blob/e8d373d/PostFreeSurfer/scripts/AtlasRegistrationToMNI152_ANTsbased.sh#L134-L172 # antsApplyTransforms -d 3 -i ${T1wRestore}.nii.gz -r ${Reference} \ # -t ${WD}/xfms/T1w_to_MNI_3Warp.nii.gz \ @@ -2917,8 +2920,8 @@ def overwrite_transform_anat_to_template(wf, cfg, strat_pool, pipe_num, opt=None # c4d -mcs ${WD}/xfms/ANTs_CombinedWarp.nii.gz -oo ${WD}/xfms/e1.nii.gz ${WD}/xfms/e2.nii.gz ${WD}/xfms/e3.nii.gz # -mcs: -multicomponent-split, -oo: -output-multiple split_combined_warp = pe.Node( - util.Function( - input_names=["input", "output_name"], + Function( + input_names=["input_name", "output_name"], output_names=["output1", "output2", "output3"], function=run_c4d, ), @@ -2927,13 +2930,16 @@ def overwrite_transform_anat_to_template(wf, cfg, strat_pool, pipe_num, opt=None split_combined_warp.inputs.output_name = "e" wf.connect( - ants_apply_warp_t1_to_template, "output_image", split_combined_warp, "input" + ants_apply_warp_t1_to_template, + "output_image", + split_combined_warp, + "input_name", ) # c4d -mcs ${WD}/xfms/ANTs_CombinedInvWarp.nii.gz -oo ${WD}/xfms/e1inv.nii.gz ${WD}/xfms/e2inv.nii.gz ${WD}/xfms/e3inv.nii.gz split_combined_inv_warp = pe.Node( - util.Function( - input_names=["input", "output_name"], + Function( + input_names=["input_name", "output_name"], output_names=["output1", "output2", "output3"], function=run_c4d, ), @@ -2945,7 +2951,7 @@ def overwrite_transform_anat_to_template(wf, cfg, strat_pool, pipe_num, opt=None ants_apply_warp_template_to_t1, "output_image", split_combined_inv_warp, - "input", + "input_name", ) # fslmaths ${WD}/xfms/e2.nii.gz -mul -1 ${WD}/xfms/e-2.nii.gz @@ -3091,6 +3097,7 @@ def overwrite_transform_anat_to_template(wf, cfg, strat_pool, pipe_num, opt=None outputs=["sbref"], ) def coregistration_prep_vol(wf, cfg, strat_pool, pipe_num, opt=None): + """Create single-band reference for coreg by selecting a functional volume.""" get_func_volume = pe.Node(interface=afni.Calc(), name=f"get_func_volume_{pipe_num}") get_func_volume.inputs.set( @@ -3134,6 +3141,7 @@ def coregistration_prep_vol(wf, cfg, strat_pool, pipe_num, opt=None): outputs=["sbref"], ) def coregistration_prep_mean(wf, cfg, strat_pool, pipe_num, opt=None): + """Create single-band reference for coregistration from mean BOLD.""" coreg_input = strat_pool.get_data("desc-mean_bold") # TODO add mean skull @@ -3174,6 +3182,7 @@ def coregistration_prep_mean(wf, cfg, 
strat_pool, pipe_num, opt=None): outputs=["sbref"], ) def coregistration_prep_fmriprep(wf, cfg, strat_pool, pipe_num, opt=None): + """Generate fMRIPrep-style single-band reference for coregistration.""" coreg_input = strat_pool.get_data("desc-ref_bold") outputs = {"sbref": coreg_input} @@ -3214,6 +3223,7 @@ def coregistration_prep_fmriprep(wf, cfg, strat_pool, pipe_num, opt=None): ], ) def coregistration(wf, cfg, strat_pool, pipe_num, opt=None): + """Coregister BOLD to T1w.""" diff_complete = False if strat_pool.check_rpool("despiked-fieldmap") and strat_pool.check_rpool( "fieldmap-mask" @@ -3223,10 +3233,10 @@ def coregistration(wf, cfg, strat_pool, pipe_num, opt=None): if strat_pool.check_rpool("T2w") and cfg.anatomical_preproc["run_t2"]: # monkey data func_to_anat = create_register_func_to_anat_use_T2( - cfg, f"func_to_anat_FLIRT_" f"{pipe_num}" + cfg, f"func_to_anat_FLIRT_{pipe_num}" ) - # https://github.com/DCAN-Labs/dcan-macaque-pipeline/blob/master/fMRIVolume/GenericfMRIVolumeProcessingPipeline.sh#L177 + # https://github.com/DCAN-Labs/dcan-macaque-pipeline/blob/90e7e3f/fMRIVolume/GenericfMRIVolumeProcessingPipeline.sh#L177 # fslmaths "$fMRIFolder"/"$NameOffMRI"_mc -Tmean "$fMRIFolder"/"$ScoutName"_gdc func_mc_mean = pe.Node( interface=afni_utils.TStat(), name=f"func_motion_corrected_mean_{pipe_num}" @@ -3253,7 +3263,7 @@ def coregistration(wf, cfg, strat_pool, pipe_num, opt=None): # if field map-based distortion correction is on, but BBR is off, # send in the distortion correction files here func_to_anat = create_register_func_to_anat( - cfg, diff_complete, f"func_to_anat_FLIRT_" f"{pipe_num}" + cfg, diff_complete, f"func_to_anat_FLIRT_{pipe_num}" ) func_to_anat.inputs.inputspec.dof = cfg.registration_workflows[ @@ -3311,7 +3321,7 @@ def coregistration(wf, cfg, strat_pool, pipe_num, opt=None): ]["run"] ): func_to_anat_bbreg = create_bbregister_func_to_anat( - diff_complete, f"func_to_anat_" f"bbreg_" f"{pipe_num}" + diff_complete, f"func_to_anat_bbreg_{pipe_num}" ) func_to_anat_bbreg.inputs.inputspec.bbr_schedule = cfg.registration_workflows[ "functional_registration" @@ -3429,14 +3439,15 @@ def coregistration(wf, cfg, strat_pool, pipe_num, opt=None): }, ) def create_func_to_T1template_xfm(wf, cfg, strat_pool, pipe_num, opt=None): - """Condense the BOLD-to-T1 coregistration transform and the T1-to-template - transform into one transform matrix. + """Create a single transform from BOLD-to-T1 coregistration and T1-to-template. + + Condense the BOLD-to-T1 coregistration transform and the T1-to-template transform into one transform matrix. """ xfm_prov = strat_pool.get_cpac_provenance("from-T1w_to-template_mode-image_xfm") reg_tool = check_prov_for_regtool(xfm_prov) xfm, outputs = bold_to_T1template_xfm_connector( - "create_func_to_T1w" f"template_xfm_{pipe_num}", cfg, reg_tool, symmetric=False + f"create_func_to_T1wtemplate_xfm_{pipe_num}", cfg, reg_tool, symmetric=False ) node, out = strat_pool.get_data("from-bold_to-T1w_mode-image_desc-linear_xfm") @@ -3506,14 +3517,16 @@ def create_func_to_T1template_xfm(wf, cfg, strat_pool, pipe_num, opt=None): }, ) def create_func_to_T1template_symmetric_xfm(wf, cfg, strat_pool, pipe_num, opt=None): - """Condense the BOLD-to-T1 coregistration transform and the T1-to- - symmetric-template transform into one transform matrix. + """Create a single transform from coregistration and T1-to-symmetric-template. + + Condense the BOLD-to-T1 coregistration transform and the T1-to-symmetric-template + transform into one transform matrix. 
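Each node block in this stretch starts by recovering which tool produced the upstream transform: `strat_pool.get_cpac_provenance(...)` feeds `check_prov_for_regtool`, and the connector branches on the result. A rough sketch of that inference, assuming tool names appear in the provenance strings:

```python
def check_prov_for_regtool_sketch(prov) -> str | None:
    """Infer the registration tool from a resource's provenance entries."""
    flat = str(prov).lower()
    if "ants" in flat:
        return "ants"
    if "fsl" in flat or "flirt" in flat or "fnirt" in flat:
        return "fsl"
    return None
```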
""" xfm_prov = strat_pool.get_cpac_provenance("from-T1w_to-symtemplate_mode-image_xfm") reg_tool = check_prov_for_regtool(xfm_prov) xfm, outputs = bold_to_T1template_xfm_connector( - "create_func_to_T1wsymtem" f"plate_xfm_{pipe_num}", + f"create_func_to_T1wsymtemplate_xfm_{pipe_num}", cfg, reg_tool, symmetric=True, @@ -3576,6 +3589,7 @@ def create_func_to_T1template_symmetric_xfm(wf, cfg, strat_pool, pipe_num, opt=N outputs=["sbref", "desc-preproc_bold", "desc-stc_bold", "bold"], ) def apply_phasediff_to_timeseries_separately(wf, cfg, strat_pool, pipe_num, opt=None): + """Apply phasediff to timeseries.""" outputs = {"desc-preproc_bold": strat_pool.get_data("desc-preproc_bold")} if not strat_pool.check_rpool("despiked-fieldmap"): return (wf, outputs) @@ -3606,7 +3620,7 @@ def apply_phasediff_to_timeseries_separately(wf, cfg, strat_pool, pipe_num, opt= wf.connect(warp_fmap, "out_file", mask_fmap, "in_file") conv_pedir = pe.Node( - interface=util.Function( + interface=Function( input_names=["pedir", "convert"], output_names=["pedir"], function=convert_pedir, @@ -3713,6 +3727,7 @@ def apply_phasediff_to_timeseries_separately(wf, cfg, strat_pool, pipe_num, opt= outputs=["desc-preproc_bold", "desc-stc_bold", "bold"], ) def apply_blip_to_timeseries_separately(wf, cfg, strat_pool, pipe_num, opt=None): + """Apply blip to timeseries.""" xfm_prov = strat_pool.get_cpac_provenance("from-bold_to-template_mode-image_xfm") reg_tool = check_prov_for_regtool(xfm_prov) @@ -3753,8 +3768,6 @@ def apply_blip_to_timeseries_separately(wf, cfg, strat_pool, pipe_num, opt=None) "functional_registration" ]["func_registration_to_template"]["FNIRT_pipelines"]["interpolation"] - connect = strat_pool.get_data("desc-preproc_bold") - if opt == "default": node, out = strat_pool.get_data("desc-preproc_bold") out_label = "desc-preproc_bold" @@ -3792,6 +3805,7 @@ def apply_blip_to_timeseries_separately(wf, cfg, strat_pool, pipe_num, opt=None) outputs={"space-template_desc-head_T1w": {"Template": "T1w-template"}}, ) def warp_wholeheadT1_to_template(wf, cfg, strat_pool, pipe_num, opt=None): + """Warp T1 head to template.""" xfm_prov = strat_pool.get_cpac_provenance("from-T1w_to-template_mode-image_xfm") reg_tool = check_prov_for_regtool(xfm_prov) @@ -3845,6 +3859,7 @@ def warp_wholeheadT1_to_template(wf, cfg, strat_pool, pipe_num, opt=None): outputs={"space-template_desc-brain_mask": {"Template": "T1w-template"}}, ) def warp_T1mask_to_template(wf, cfg, strat_pool, pipe_num, opt=None): + """Warp T1 mask to template.""" xfm_prov = strat_pool.get_cpac_provenance("from-T1w_to-template_mode-image_xfm") reg_tool = check_prov_for_regtool(xfm_prov) @@ -3905,6 +3920,7 @@ def warp_T1mask_to_template(wf, cfg, strat_pool, pipe_num, opt=None): }, ) def warp_timeseries_to_T1template(wf, cfg, strat_pool, pipe_num, opt=None): + """Warp timeseries to T1 template.""" xfm_prov = strat_pool.get_cpac_provenance("from-bold_to-template_mode-image_xfm") reg_tool = check_prov_for_regtool(xfm_prov) @@ -3967,6 +3983,7 @@ def warp_timeseries_to_T1template(wf, cfg, strat_pool, pipe_num, opt=None): }, ) def warp_timeseries_to_T1template_deriv(wf, cfg, strat_pool, pipe_num, opt=None): + """Warp timeseries to T1 template at derivative resolution.""" xfm_prov = strat_pool.get_cpac_provenance("from-bold_to-template_mode-image_xfm") reg_tool = check_prov_for_regtool(xfm_prov) @@ -4047,11 +4064,15 @@ def warp_timeseries_to_T1template_deriv(wf, cfg, strat_pool, pipe_num, opt=None) }, ) def warp_timeseries_to_T1template_abcd(wf, cfg, strat_pool, pipe_num, opt=None): 
- # Apply motion correction, coreg, anat-to-template transforms on raw functional timeseries using ABCD-style registration - # Ref: https://github.com/DCAN-Labs/DCAN-HCP/blob/master/fMRIVolume/scripts/OneStepResampling.sh#L168-L197 + """Apply motion correction, coreg, anat-to-template transforms... - # https://github.com/DCAN-Labs/DCAN-HCP/blob/master/fMRIVolume/scripts/DistortionCorrectionAndEPIToT1wReg_FLIRTBBRAndFreeSurferBBRbased.sh#L548 - # convertwarp --relout --rel -m ${WD}/fMRI2str.mat --ref=${T1wImage} --out=${WD}/fMRI2str.nii.gz + ...on raw functional timeseries using ABCD-style registration. + + Ref: https://github.com/DCAN-Labs/DCAN-HCP/blob/1d90814/fMRIVolume/scripts/OneStepResampling.sh#L168-L197 + + https://github.com/DCAN-Labs/DCAN-HCP/blob/a8d495a/fMRIVolume/scripts/DistortionCorrectionAndEPIToT1wReg_FLIRTBBRAndFreeSurferBBRbased.sh#L548 + convertwarp --relout --rel -m ${WD}/fMRI2str.mat --ref=${T1wImage} --out=${WD}/fMRI2str.nii.gz + """ convert_func_to_anat_linear_warp = pe.Node( interface=fsl.ConvertWarp(), name=f"convert_func_to_anat_linear_warp_{pipe_num}" ) @@ -4072,7 +4093,7 @@ def warp_timeseries_to_T1template_abcd(wf, cfg, strat_pool, pipe_num, opt=None): node, out = strat_pool.get_data("from-bold_to-T1w_mode-image_desc-linear_xfm") wf.connect(node, out, convert_func_to_anat_linear_warp, "premat") - # https://github.com/DCAN-Labs/DCAN-HCP/blob/master/fMRIVolume/scripts/OneStepResampling.sh#L140 + # https://github.com/DCAN-Labs/DCAN-HCP/blob/1d90814/fMRIVolume/scripts/OneStepResampling.sh#L140 # convertwarp --relout --rel --warp1=${fMRIToStructuralInput} --warp2=${StructuralToStandard} --ref=${WD}/${T1wImageFile}.${FinalfMRIResolution} --out=${OutputTransform} convert_func_to_standard_warp = pe.Node( interface=fsl.ConvertWarp(), name=f"convert_func_to_standard_warp_{pipe_num}" @@ -4114,7 +4135,7 @@ def warp_timeseries_to_T1template_abcd(wf, cfg, strat_pool, pipe_num, opt=None): ) # TODO add condition: if no gradient distortion - # https://github.com/DCAN-Labs/DCAN-HCP/blob/master/fMRIVolume/GenericfMRIVolumeProcessingPipeline.sh#L283-L284 + # https://github.com/DCAN-Labs/DCAN-HCP/blob/6466b78/fMRIVolume/GenericfMRIVolumeProcessingPipeline.sh#L283-L284 # fslroi "$fMRIFolder"/"$NameOffMRI"_gdc "$fMRIFolder"/"$NameOffMRI"_gdc_warp 0 3 extract_func_roi = pe.Node( interface=fsl.ExtractROI(), name=f"extract_func_roi_{pipe_num}" @@ -4135,7 +4156,7 @@ def warp_timeseries_to_T1template_abcd(wf, cfg, strat_pool, pipe_num, opt=None): wf.connect(extract_func_roi, "roi_file", multiply_func_roi_by_zero, "in_file") - # https://github.com/DCAN-Labs/DCAN-HCP/blob/master/fMRIVolume/scripts/OneStepResampling.sh#L168-L193 + # https://github.com/DCAN-Labs/DCAN-HCP/blob/1d90814/fMRIVolume/scripts/OneStepResampling.sh#L168-L193 # fslsplit ${InputfMRI} ${WD}/prevols/vol -t split_func = pe.Node(interface=fsl.Split(), name=f"split_func_{pipe_num}") @@ -4266,7 +4287,7 @@ def warp_timeseries_to_T1template_abcd(wf, cfg, strat_pool, pipe_num, opt=None): # fslmerge -tr ${OutputfMRI}_mask $FrameMergeSTRINGII $TR_vol merge_func_mask_to_standard = pe.Node( - interface=fslMerge(), name="merge_func_mask_to_" f"standard_{pipe_num}" + interface=fslMerge(), name=f"merge_func_mask_to_standard_{pipe_num}" ) merge_func_mask_to_standard.inputs.dimension = "t" @@ -4327,7 +4348,7 @@ def warp_timeseries_to_T1template_abcd(wf, cfg, strat_pool, pipe_num, opt=None): wf.connect(convert_dc_warp, "out_file", applywarp_scout, "field_file") - # 
https://github.com/DCAN-Labs/DCAN-HCP/blob/master/fMRIVolume/scripts/IntensityNormalization.sh#L124-L127 + # https://github.com/DCAN-Labs/DCAN-HCP/blob/1214767/fMRIVolume/scripts/IntensityNormalization.sh#L124-L127 # fslmaths ${InputfMRI} -mas ${BrainMask} -mas ${InputfMRI}_mask -thr 0 -ing 10000 ${OutputfMRI} -odt float merge_func_mask = pe.Node(util.Merge(2), name=f"merge_func_mask_{pipe_num}") @@ -4416,11 +4437,16 @@ def warp_timeseries_to_T1template_abcd(wf, cfg, strat_pool, pipe_num, opt=None): }, ) def warp_timeseries_to_T1template_dcan_nhp(wf, cfg, strat_pool, pipe_num, opt=None): - # Apply motion correction, coreg, anat-to-template transforms on raw functional timeseries - # Ref: https://github.com/DCAN-Labs/dcan-macaque-pipeline/blob/master/fMRIVolume/scripts/OneStepResampling.sh + """ + Apply motion correction, coreg, anat-to-template transforms... - # https://github.com/DCAN-Labs/dcan-macaque-pipeline/blob/master/fMRIVolume/scripts/OneStepResampling.sh#L131 - # ${FSLDIR}/bin/flirt -interp spline -in ${T1wImage} -ref ${T1wImage} -applyisoxfm $FinalfMRIResolution -out ${WD}/${T1wImageFile}.${FinalfMRIResolution} + ...on raw functional timeseries. + + Ref: https://github.com/DCAN-Labs/dcan-macaque-pipeline/blob/8fe9f61/fMRIVolume/scripts/OneStepResampling.sh + + https://github.com/DCAN-Labs/dcan-macaque-pipeline/blob/8fe9f61/fMRIVolume/scripts/OneStepResampling.sh#L131 + ${FSLDIR}/bin/flirt -interp spline -in ${T1wImage} -ref ${T1wImage} -applyisoxfm $FinalfMRIResolution -out ${WD}/${T1wImageFile}.${FinalfMRIResolution} + """ anat_resample = pe.Node( interface=fsl.FLIRT(), name=f"anat_resample_func_res_{pipe_num}" ) @@ -4450,7 +4476,7 @@ def warp_timeseries_to_T1template_dcan_nhp(wf, cfg, strat_pool, pipe_num, opt=No wf.connect(node, out, applywarp_anat_res, "in_file") wf.connect(anat_resample, "out_file", applywarp_anat_res, "ref_file") - # https://github.com/DCAN-Labs/dcan-macaque-pipeline/blob/master/fMRIVolume/scripts/OneStepResampling.sh#L136-L138 + # https://github.com/DCAN-Labs/dcan-macaque-pipeline/blob/8fe9f61/fMRIVolume/scripts/OneStepResampling.sh#L136-L138 # Create brain masks in this space (changing resolution) # ${FSLDIR}/bin/applywarp --rel --interp=nn -i ${FreeSurferBrainMask}.nii.gz -r ${WD}/${T1wImageFile}.${FinalfMRIResolution} --premat=$FSLDIR/etc/flirtsch/ident.mat -o ${WD}/${FreeSurferBrainMaskFile}.${FinalfMRIResolution}.nii.gz applywarp_anat_mask_res = pe.Node( @@ -4498,7 +4524,7 @@ def warp_timeseries_to_T1template_dcan_nhp(wf, cfg, strat_pool, pipe_num, opt=No wf.connect(applywarp_bias_field_res, "out_file", biasfield_thr, "in_file") - # https://github.com/DCAN-Labs/dcan-macaque-pipeline/blob/master/fMRIVolume/scripts/OneStepResampling.sh#L144-L146 + # https://github.com/DCAN-Labs/dcan-macaque-pipeline/blob/8fe9f61/fMRIVolume/scripts/OneStepResampling.sh#L144-L146 # convertwarp --relout --rel --warp1=${fMRIToStructuralInput} --warp2=${StructuralToStandard} --ref=${WD}/${T1wImageFile}.${FinalfMRIResolution} --out=${OutputTransform} convert_func_to_standard_warp = pe.Node( interface=fsl.ConvertWarp(), name=f"convert_func_to_standard_warp_{pipe_num}" @@ -4517,7 +4543,7 @@ def warp_timeseries_to_T1template_dcan_nhp(wf, cfg, strat_pool, pipe_num, opt=No applywarp_anat_res, "out_file", convert_func_to_standard_warp, "reference" ) - # https://github.com/DCAN-Labs/dcan-macaque-pipeline/blob/master/fMRIVolume/GenericfMRIVolumeProcessingPipeline.sh#L157-L158 + # 
https://github.com/DCAN-Labs/dcan-macaque-pipeline/blob/90e7e3f/fMRIVolume/GenericfMRIVolumeProcessingPipeline.sh#L157-L158 # fslroi "$fMRIFolder"/"$NameOffMRI"_gdc "$fMRIFolder"/"$NameOffMRI"_gdc_warp 0 3 extract_func_roi = pe.Node( interface=fsl.ExtractROI(), name=f"extract_func_roi_{pipe_num}" @@ -4538,7 +4564,7 @@ def warp_timeseries_to_T1template_dcan_nhp(wf, cfg, strat_pool, pipe_num, opt=No wf.connect(extract_func_roi, "roi_file", multiply_func_roi_by_zero, "in_file") - # https://github.com/DCAN-Labs/dcan-macaque-pipeline/blob/master/fMRIVolume/scripts/OneStepResampling.sh#L173 + # https://github.com/DCAN-Labs/dcan-macaque-pipeline/blob/8fe9f61/fMRIVolume/scripts/OneStepResampling.sh#L173 # fslsplit ${InputfMRI} ${WD}/prevols/vol -t split_func = pe.Node(interface=fsl.Split(), name=f"split_func_{pipe_num}") @@ -4656,7 +4682,7 @@ def warp_timeseries_to_T1template_dcan_nhp(wf, cfg, strat_pool, pipe_num, opt=No # fslmerge -tr ${OutputfMRI}_mask $FrameMergeSTRINGII $TR_vol merge_func_mask_to_standard = pe.Node( - interface=fslMerge(), name="merge_func_mask_to_" f"standard_{pipe_num}" + interface=fslMerge(), name=f"merge_func_mask_to_standard_{pipe_num}" ) merge_func_mask_to_standard.inputs.dimension = "t" @@ -4677,7 +4703,7 @@ def warp_timeseries_to_T1template_dcan_nhp(wf, cfg, strat_pool, pipe_num, opt=No wf.connect(merge_func_mask_to_standard, "merged_file", find_min_mask, "in_file") - # https://github.com/DCAN-Labs/dcan-macaque-pipeline/blob/master/fMRIVolume/scripts/IntensityNormalization.sh#L113-L119 + # https://github.com/DCAN-Labs/dcan-macaque-pipeline/blob/8fe9f61/fMRIVolume/scripts/IntensityNormalization.sh#L113-L119 # fslmaths ${InputfMRI} -div ${BiasField} $jacobiancom -mas ${BrainMask} -mas ${InputfMRI}_mask -ing 10000 ${OutputfMRI} -odt float merge_func_mask = pe.Node(util.Merge(3), name=f"merge_operand_files_{pipe_num}") @@ -4760,49 +4786,49 @@ def warp_timeseries_to_T1template_dcan_nhp(wf, cfg, strat_pool, pipe_num, opt=No def single_step_resample_timeseries_to_T1template( wf, cfg, strat_pool, pipe_num, opt=None ): - """ - Apply motion correction, coreg, anat-to-template transforms on - slice-time corrected functional timeseries based on fMRIPrep - pipeline - - Copyright (c) 2015-2018, the CRN developers team. - All rights reserved. - - Redistribution and use in source and binary forms, with or without - modification, are permitted provided that the following conditions - are met: - - * Redistributions of source code must retain the above copyright - notice, this list of conditions and the following disclaimer. - - * Redistributions in binary form must reproduce the above copyright - notice, this list of conditions and the following disclaimer in the - documentation and/or other materials provided with the distribution. - - * Neither the name of fmriprep nor the names of its contributors - may be used to endorse or promote products derived from this - software without specific prior written permission. - - THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS - "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT - LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS - FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE - COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, - INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES - (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR - SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) - HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, - STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) - ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED - OF THE POSSIBILITY OF SUCH DAMAGE. + """Apply motion correction, coreg, anat-to-template transforms... + + ...on slice-time corrected functional timeseries based on fMRIPrep pipeline. Ref: https://github.com/nipreps/fmriprep/blob/84a6005b/fmriprep/workflows/bold/resampling.py#L159-L419 - """ # noqa: 501 + """ + # Copyright (c) 2015-2018, the CRN developers team. + # All rights reserved. + + # Redistribution and use in source and binary forms, with or without + # modification, are permitted provided that the following conditions + # are met: + + # * Redistributions of source code must retain the above copyright + # notice, this list of conditions and the following disclaimer. + + # * Redistributions in binary form must reproduce the above copyright + # notice, this list of conditions and the following disclaimer in the + # documentation and/or other materials provided with the distribution. + + # * Neither the name of fmriprep nor the names of its contributors + # may be used to endorse or promote products derived from this + # software without specific prior written permission. + + # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT + # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS + # FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE + # COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, + # INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES + # (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR + # SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) + # HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, + # STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED + # OF THE POSSIBILITY OF SUCH DAMAGE. + + # Modifications copyright (C) 2021 - 2024 C-PAC Developers xfm_prov = strat_pool.get_cpac_provenance("from-T1w_to-template_mode-image_xfm") reg_tool = check_prov_for_regtool(xfm_prov) bbr2itk = pe.Node( - util.Function( + Function( input_names=["reference_file", "source_file", "transform_file"], output_names=["itk_transform"], function=run_c3d, @@ -4843,7 +4869,7 @@ def single_step_resample_timeseries_to_T1template( ### Loop starts! 
### motionxfm2itk = pe.MapNode( - util.Function( + Function( input_names=["reference_file", "source_file", "transform_file"], output_names=["itk_transform"], function=run_c3d, @@ -4864,7 +4890,7 @@ def single_step_resample_timeseries_to_T1template( wf.connect(node, out, motionxfm2itk, "transform_file") elif motion_correct_tool == "3dvolreg": convert_transform = pe.Node( - util.Function( + Function( input_names=["one_d_filename"], output_names=["transform_directory"], function=one_d_to_mat, @@ -5055,6 +5081,7 @@ def single_step_resample_timeseries_to_T1template( }, ) def warp_sbref_to_T1template(wf, cfg, strat_pool, pipe_num, opt=None): + """Warp single-band reference to T1 template.""" xfm = "from-bold_to-template_mode-image_xfm" wf, apply_xfm = warp_resource_to_template( wf, @@ -5098,6 +5125,7 @@ def warp_sbref_to_T1template(wf, cfg, strat_pool, pipe_num, opt=None): }, ) def warp_bold_mask_to_T1template(wf, cfg, strat_pool, pipe_num, opt=None): + """Warp BOLD mask to T1 template.""" xfm = "from-bold_to-template_mode-image_xfm" wf, apply_xfm = warp_resource_to_template( wf, @@ -5143,8 +5171,9 @@ def warp_bold_mask_to_T1template(wf, cfg, strat_pool, pipe_num, opt=None): }, ) def warp_deriv_mask_to_T1template(wf, cfg, strat_pool, pipe_num, opt=None): - """Transform the BOLD mask to template space and to the resolution set for - the derivative outputs. + """Transform the BOLD mask to template space... + + ...and to the resolution set for the derivative outputs. """ xfm = "from-bold_to-template_mode-image_xfm" wf, apply_xfm = warp_resource_to_template( @@ -5181,6 +5210,7 @@ def warp_deriv_mask_to_T1template(wf, cfg, strat_pool, pipe_num, opt=None): outputs={"space-template_desc-preproc_bold": {"Template": "EPI-template"}}, ) def warp_timeseries_to_EPItemplate(wf, cfg, strat_pool, pipe_num, opt=None): + """Warp timeseries to EPI template.""" xfm = "from-bold_to-EPItemplate_mode-image_xfm" wf, apply_xfm, resource = warp_resource_to_template( wf, cfg, strat_pool, pipe_num, "desc-preproc_bold", xfm, time_series=True @@ -5204,6 +5234,7 @@ def warp_timeseries_to_EPItemplate(wf, cfg, strat_pool, pipe_num, opt=None): outputs={"space-template_desc-mean_bold": {"Template": "EPI-template"}}, ) def warp_bold_mean_to_EPItemplate(wf, cfg, strat_pool, pipe_num, opt=None): + """Warp mean BOLD to EPI template space.""" xfm = "from-bold_to-EPItemplate_mode-image_xfm" wf, apply_xfm = warp_resource_to_template( wf, cfg, strat_pool, pipe_num, "desc-mean_bold", xfm, time_series=False @@ -5227,6 +5258,7 @@ def warp_bold_mean_to_EPItemplate(wf, cfg, strat_pool, pipe_num, opt=None): outputs={"space-template_desc-bold_mask": {"Template": "EPI-template"}}, ) def warp_bold_mask_to_EPItemplate(wf, cfg, strat_pool, pipe_num, opt=None): + """Warp BOLD mask to EPI tempalate.""" xfm = "from-bold_to-EPItemplate_mode-image_xfm" wf, apply_xfm = warp_resource_to_template( wf, @@ -5258,8 +5290,9 @@ def warp_bold_mask_to_EPItemplate(wf, cfg, strat_pool, pipe_num, opt=None): }, ) def warp_deriv_mask_to_EPItemplate(wf, cfg, strat_pool, pipe_num, opt=None): - """Transform the BOLD mask to template space and to the resolution set for - the derivative outputs. + """Transform the BOLD mask to EPI template space... + + ...and to the resolution set for the derivative outputs. 
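`bbr2itk` and `motionxfm2itk` above both wrap `run_c3d`, which converts FSL-convention affines into ITK transforms that `antsApplyTransforms` accepts. A sketch of the underlying command, assuming the usual `c3d_affine_tool` invocation that fMRIPrep popularized:

```python
import os
import subprocess

def run_c3d_sketch(reference_file, source_file, transform_file):
    """Convert an FSL .mat affine to an ITK text transform."""
    itk_transform = os.path.join(os.getcwd(), "affine.txt")
    subprocess.check_call([
        "c3d_affine_tool", "-ref", reference_file, "-src", source_file,
        transform_file, "-fsl2ras", "-oitk", itk_transform,
    ])
    return itk_transform
```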
""" xfm = "from-bold_to-EPItemplate_mode-image_xfm" wf, apply_xfm = warp_resource_to_template( @@ -5299,12 +5332,13 @@ def warp_deriv_mask_to_EPItemplate(wf, cfg, strat_pool, pipe_num, opt=None): }, ) def warp_tissuemask_to_T1template(wf, cfg, strat_pool, pipe_num, opt=None): + """Warp tissue masks to T1 template.""" return warp_tissuemask_to_template( wf, cfg, strat_pool, pipe_num, - xfm="from-T1w_to-template_mode-image_" "xfm", + xfm="from-T1w_to-template_mode-image_xfm", template_space="T1", ) @@ -5333,18 +5367,19 @@ def warp_tissuemask_to_T1template(wf, cfg, strat_pool, pipe_num, opt=None): }, ) def warp_tissuemask_to_EPItemplate(wf, cfg, strat_pool, pipe_num, opt=None): + """Warp tissue masks to EPI template.""" return warp_tissuemask_to_template( wf, cfg, strat_pool, pipe_num, - xfm="from-bold_to-EPItemplate_" "mode-image_xfm", + xfm="from-bold_to-EPItemplate_mode-image_xfm", template_space="EPI", ) def warp_tissuemask_to_template(wf, cfg, strat_pool, pipe_num, xfm, template_space): - """Function to apply transforms to tissue masks + """Apply transforms to tissue masks. Parameters ---------- @@ -5392,12 +5427,12 @@ def warp_resource_to_template( cfg, strat_pool, pipe_num: int, - input_resource: LIST_OR_STR, + input_resource: list[str] | str, xfm: str, reference: Optional[str] = None, time_series: Optional[bool] = False, -) -> TUPLE[pe.Workflow, pe.Workflow, str]: - """Function to warp a resource into a template space +) -> tuple[pe.Workflow, pe.Workflow, str]: + """Warp a resource into a template space. Parameters ---------- @@ -5460,7 +5495,7 @@ def warp_resource_to_template( ) # set up 'apply_transform' subworkflow apply_xfm = apply_transform( - f"warp_{subwf_input_name}_to_" f"{template_space}template_{pipe_num}", + f"warp_{subwf_input_name}_to_{template_space}template_{pipe_num}", reg_tool, time_series=time_series, num_cpus=cfg.pipeline_setup["system_config"]["max_cores_per_participant"], @@ -5483,8 +5518,8 @@ def warp_resource_to_template( def _warp_return( wf: pe.Workflow, apply_xfm: Optional[pe.Workflow], outputs: dict -) -> TUPLE[pe.Workflow, dict]: - """Check if we have a transform to apply; if not, don't add the outputs""" +) -> tuple[pe.Workflow, dict]: + """Check if we have a transform to apply; if not, don't add the outputs.""" if apply_xfm is None: return wf, {} return wf, outputs diff --git a/CPAC/registration/tests/mocks.py b/CPAC/registration/tests/mocks.py index 0bdf2f678b..4f35595abd 100644 --- a/CPAC/registration/tests/mocks.py +++ b/CPAC/registration/tests/mocks.py @@ -11,7 +11,8 @@ def file_node(path, file_node_num=0): input_node = pe.Node( - util.IdentityInterface(fields=["file"]), name=f"file_node_{file_node_num}" + util.IdentityInterface(fields=["file"]), + name=f"file_node_{file_node_num}", ) input_node.inputs.file = path return input_node, "file" @@ -150,6 +151,7 @@ def configuration_strategy_mock(method="FSL"): resampled_template.inputs.template = template resampled_template.inputs.template_name = template_name resampled_template.inputs.tag = tag + resampled_template.inputs.orientation = "RPI" strat.update_resource_pool( {template_name: (resampled_template, "resampled_template")} diff --git a/CPAC/registration/tests/test_registration.py b/CPAC/registration/tests/test_registration.py index 4b8edea0cd..d8e8228497 100755 --- a/CPAC/registration/tests/test_registration.py +++ b/CPAC/registration/tests/test_registration.py @@ -22,15 +22,12 @@ def test_nonlinear_register(): ## input_skull ## reference_brain - mni_file = 
"/usr/share/fsl/4.1/data/standard/MNI152_T1_3mm_brain.nii.gz" ## reference_skull ## fnirt_config - fnirt_config = "T1_2_MNI152_3mm" ## fnirt_warp_res - fnirt_warp_res = None # ?? what is this for?: func_file = "/home/data/Projects/nuisance_reliability_paper/working_dir_CPAC_order/resting_preproc/nuisance_preproc/_session_id_NYU_TRT_session1_subject_id_sub05676/_csf_threshold_0.4/_gm_threshold_0.2/_wm_threshold_0.66/_run_scrubbing_False/_nc_5/_selector_6.7/regress_nuisance/mapflow/_regress_nuisance0/residual.nii.gz" @@ -133,7 +130,7 @@ def test_registration_lesion(): anat_preproc.inputs.inputspec.anat = anat_file - lesion_preproc = create_lesion_preproc(wf_name="lesion_preproc") + lesion_preproc = create_lesion_preproc(cfg, wf_name="lesion_preproc") lesion_preproc.inputs.inputspec.lesion = lesion_file diff --git a/CPAC/resources/configs/pipeline_config_blank.yml b/CPAC/resources/configs/pipeline_config_blank.yml index a2c0afe6bb..869937df04 100644 --- a/CPAC/resources/configs/pipeline_config_blank.yml +++ b/CPAC/resources/configs/pipeline_config_blank.yml @@ -11,6 +11,9 @@ pipeline_setup: # Name for this pipeline configuration - useful for identification. # This string will be sanitized and used in filepaths pipeline_name: cpac-blank-template + + # Desired orientation for the output data. "RPI", "LPI", "RAI", "LAI", "RAS", "LAS", "RPS", "LPS" + desired_orientation: RPI output_directory: # Quality control outputs diff --git a/CPAC/resources/tests/test_templates.py b/CPAC/resources/tests/test_templates.py index e73a4d7bc0..048cbe9b1c 100644 --- a/CPAC/resources/tests/test_templates.py +++ b/CPAC/resources/tests/test_templates.py @@ -14,11 +14,12 @@ # You should have received a copy of the GNU Lesser General Public # License along with C-PAC. If not, see . -"""Tests for packaged templates""" +"""Tests for packaged templates.""" import os import pytest +import nipype.pipeline.engine as pe from CPAC.pipeline import ALL_PIPELINE_CONFIGS from CPAC.pipeline.engine import ingress_pipeconfig_paths, ResourcePool @@ -29,14 +30,14 @@ @pytest.mark.parametrize("pipeline", ALL_PIPELINE_CONFIGS) def test_packaged_path_exists(pipeline): """ - Check that all local templates are included in image at at - least one resolution + Check that all local templates are included in image at atleast one resolution. """ - rpool = ingress_pipeconfig_paths( - Preconfiguration(pipeline), ResourcePool(), "pytest" + wf = pe.Workflow(name="test") + wf, rpool = ingress_pipeconfig_paths( + wf, Preconfiguration(pipeline), ResourcePool(), "pytest" ) for resource in rpool.rpool.values(): - node = list(resource.values())[0].get("data")[0] + node = next(iter(resource.values())).get("data")[0] if hasattr(node.inputs, "template") and not node.inputs.template.startswith( "s3:" ): diff --git a/CPAC/utils/datasource.py b/CPAC/utils/datasource.py index 439d09ab61..25adb1eeca 100644 --- a/CPAC/utils/datasource.py +++ b/CPAC/utils/datasource.py @@ -1,4 +1,4 @@ -# Copyright (C) 2012-2023 C-PAC Developers +# Copyright (C) 2012-2024 C-PAC Developers # This file is part of C-PAC. @@ -14,13 +14,14 @@ # You should have received a copy of the GNU Lesser General Public # License along with C-PAC. If not, see . 
+"""Utilities for sourcing data.""" + import csv import json from pathlib import Path import re -from typing import Union -from nipype import logging +from voluptuous import RequiredFieldInvalid from nipype.interfaces import utility as util from CPAC.pipeline import nipype_pipeline_engine as pe @@ -28,15 +29,12 @@ from CPAC.utils import function from CPAC.utils.bids_utils import bids_remove_entity from CPAC.utils.interfaces.function import Function -from CPAC.utils.typing import TUPLE +from CPAC.utils.monitoring import FMLOGGER from CPAC.utils.utils import get_scan_params -logger = logging.getLogger("nipype.workflow") - def bidsier_prefix(unique_id): - """ - Function to return a BIDSier prefix for a given unique_id + """Return a BIDSier prefix for a given unique_id. Parameters ---------- @@ -67,8 +65,7 @@ def bidsier_prefix(unique_id): def get_rest(scan, rest_dict, resource="scan"): - """Return the file path of the chosen resource stored in the functional - file dictionary, if it exists. + """Return the path of the chosen resource in the functional file dictionary. scan: the scan/series name or label rest_dict: the dictionary read in from the data configuration YAML file @@ -88,25 +85,20 @@ def get_rest(scan, rest_dict, resource="scan"): def extract_scan_params_dct(scan_params_dct): + """Extract the scan parameters dictionary from the data configuration file.""" return scan_params_dct -def get_map(map, map_dct): - # return the spatial map required - return map_dct[map] - - def select_model_files(model, ftest, model_name): - """ - Method to select model files - """ + """Select model files.""" import glob import os files = glob.glob(os.path.join(model, "*")) if len(files) == 0: - raise Exception("No files found inside directory %s" % model) + msg = f"No files found inside directory {model}" + raise FileNotFoundError(msg) fts_file = "" @@ -120,25 +112,25 @@ def select_model_files(model, ftest, model_name): elif (model_name + ".con") in filename: con_file = filename - if ftest == True and fts_file == "": + if ftest and fts_file == "": errmsg = ( "\n[!] CPAC says: You have f-tests included in your group " - "analysis model '%s', but no .fts files were found in the " - "output folder specified for group analysis: %s.\n\nThe " + f"analysis model '{model_name}', but no .fts files were found in the " + f"output folder specified for group analysis: {model}.\n\nThe " ".fts file is automatically generated by CPAC, and if you " "are seeing this error, it is because something went wrong " "with the generation of this file, or it has been moved." - "\n\n" % (model_name, model) ) - raise Exception(errmsg) + raise FileNotFoundError(errmsg) return fts_file, con_file, grp_file, mat_file def check_func_scan(func_scan_dct, scan): - """Run some checks on the functional timeseries-related files for a given - series/scan name or label. + """Run some checks on the functional timeseries-related files. + + For a given series/scan name or label. """ scan_resources = func_scan_dct[scan] @@ -154,7 +146,7 @@ def check_func_scan(func_scan_dct, scan): " scan parameters: /path/to/scan_params.json\n\n" "See the User Guide for more information.\n\n" ) - raise Exception(err) + raise ValueError(err) # actual 4D time series file if "scan" not in scan_resources.keys(): @@ -163,21 +155,23 @@ def check_func_scan(func_scan_dct, scan): "scan file, which should be a filepath labeled with the " "'scan' key.\n\n" ) - raise Exception(err) + raise FileNotFoundError(err) # Nipype restriction (may have changed) if "." 
in scan or "+" in scan or "*" in scan: - raise Exception( + msg = ( "\n\n[!] Scan names cannot contain any special " "characters (., +, *, etc.). Please update this " f"and try again.\n\nScan: {scan}" "\n\n" ) + raise ValueError(msg) def create_func_datasource(rest_dict, rpool, wf_name="func_datasource"): - """Return the functional timeseries-related file paths for each - series/scan, from the dictionary of functional files described in the data + """Return the functional timeseries-related file paths for each series/scan... + + ...from the dictionary of functional files described in the data configuration (sublist) YAML file. Scan input (from inputnode) is an iterable. @@ -288,8 +282,10 @@ def create_func_datasource(rest_dict, rpool, wf_name="func_datasource"): def create_fmap_datasource(fmap_dct, wf_name="fmap_datasource"): - """Return the field map files, from the dictionary of functional files - described in the data configuration (sublist) YAML file. + """Return the field map files... + + ...from the dictionary of functional files described in the data configuration + (sublist) YAML file. """ import nipype.interfaces.utility as util @@ -379,6 +375,7 @@ def create_fmap_datasource(fmap_dct, wf_name="fmap_datasource"): def get_fmap_phasediff_metadata(data_config_scan_params): + """Return the scan parameters for a field map phasediff scan.""" if ( not isinstance(data_config_scan_params, dict) and ".json" in data_config_scan_params @@ -411,11 +408,10 @@ def get_fmap_phasediff_metadata(data_config_scan_params): ) -@Function.sig_imports(["from CPAC.utils.typing import TUPLE"]) def calc_delta_te_and_asym_ratio( effective_echo_spacing: float, echo_times: list -) -> TUPLE[float, float]: - """Calcluate ``deltaTE`` and ``ees_asym_ratio`` from given metadata +) -> tuple[float, float]: + """Calcluate ``deltaTE`` and ``ees_asym_ratio`` from given metadata. Parameters ---------- @@ -431,15 +427,19 @@ def calc_delta_te_and_asym_ratio( ees_asym_ratio : float """ if not isinstance(effective_echo_spacing, float): - raise LookupError( + msg = ( "C-PAC could not find `EffectiveEchoSpacing` in " "either fmap or func sidecar JSON, but that field " "is required for PhaseDiff distortion correction." ) + raise LookupError(msg) # convert into milliseconds if necessary # these values will/should never be more than 10ms - if ((echo_times[0] * 1000) < 10) and ((echo_times[1] * 1000) < 10): + if ( + ((echo_times[0] * 1000) < 10) # noqa: PLR2004 + and ((echo_times[1] * 1000) < 10) # noqa: PLR2004 + ): echo_times[0] = echo_times[0] * 1000 echo_times[1] = echo_times[1] * 1000 @@ -448,16 +448,17 @@ def calc_delta_te_and_asym_ratio( return deltaTE, ees_asym_ratio -def gather_echo_times(echotime_1, echotime_2=None, echotime_3=None, echotime_4=None): +def gather_echo_times(echotime_1, echotime_2, echotime_3=None, echotime_4=None): + """Gather the echo times from the field map data.""" echotime_list = [echotime_1, echotime_2, echotime_3, echotime_4] echotime_list = list(filter(lambda item: item is not None, echotime_list)) echotime_list = list(set(echotime_list)) - if len(echotime_list) != 2: - raise Exception( - "\n[!] Something went wrong with the field map echo " - "times - there should be two distinct values.\n\n" - f"Echo Times:\n{echotime_list}\n" + if len(echotime_list) != 2: # noqa: PLR2004 + msg = ( + "\n[!] 
Something went wrong with the field map echo times - there should" + f" be two distinct values.\n\nEcho Times:\n{echotime_list}\n" ) + raise ValueError(msg) return echotime_list @@ -468,7 +469,9 @@ def match_epi_fmaps( epi_fmap_two=None, epi_fmap_params_two=None, ): - """Parse the field map files in the data configuration and determine which + """Match EPI field maps to the BOLD scan. + + Parse the field map files in the data configuration and determine which ones have the same and opposite phase-encoding directions as the BOLD scan in the current pipeline. @@ -520,6 +523,7 @@ def ingress_func_metadata( unique_id=None, num_strat=None, ): + """Ingress metadata for functional scans.""" name_suffix = "" for suffix_part in (unique_id, num_strat): if suffix_part is not None: @@ -531,18 +535,18 @@ def ingress_func_metadata( fmap_TE_list = [] if "fmap" in sub_dict: second = False - for key in sub_dict["fmap"]: + for orig_key in sub_dict["fmap"]: gather_fmap = create_fmap_datasource( - sub_dict["fmap"], f"fmap_gather_{key}_{subject_id}" + sub_dict["fmap"], f"fmap_gather_{orig_key}_{subject_id}" ) gather_fmap.inputs.inputnode.set( subject=subject_id, creds_path=input_creds_path, dl_dir=cfg.pipeline_setup["working_directory"]["path"], ) - gather_fmap.inputs.inputnode.scan = key + gather_fmap.inputs.inputnode.scan = orig_key - orig_key = key + key = orig_key if "epi" in key and not second: key = "epi-1" second = True @@ -672,7 +676,7 @@ def ingress_func_metadata( input_names=["effective_echo_spacing", "echo_times"], output_names=["deltaTE", "ees_asym_ratio"], function=calc_delta_te_and_asym_ratio, - imports=["from typing import Optional, Tuple"], + imports=["from typing import Optional"], ), name=f"diff_distcor_calc_delta{name_suffix}", ) @@ -726,7 +730,6 @@ def ingress_func_metadata( "effective_echo_spacing", ], function=get_scan_params, - imports=["from CPAC.utils.utils import check, try_fetch_parameter"], ), name=f"bold_scan_params_{subject_id}{name_suffix}", ) @@ -803,6 +806,7 @@ def ingress_func_metadata( def create_general_datasource(wf_name): + """Create a general-purpose datasource node.""" import nipype.interfaces.utility as util from CPAC.pipeline import nipype_pipeline_engine as pe @@ -846,6 +850,7 @@ def create_general_datasource(wf_name): def create_check_for_s3_node( name, file_path, img_type="other", creds_path=None, dl_dir=None, map_node=False ): + """Create a node to check if a file is on S3.""" if map_node: check_s3_node = pe.MapNode( function.Function( @@ -855,7 +860,7 @@ def create_check_for_s3_node( as_module=True, ), iterfield=["file_path"], - name="check_for_s3_%s" % name, + name=f"check_for_s3_{name}", ) else: check_s3_node = pe.Node( @@ -865,7 +870,7 @@ def create_check_for_s3_node( function=check_for_s3, as_module=True, ), - name="check_for_s3_%s" % name, + name=f"check_for_s3_{name}", ) check_s3_node.inputs.set( @@ -875,10 +880,10 @@ def create_check_for_s3_node( return check_s3_node -# Check if passed-in file is on S3 def check_for_s3( file_path, creds_path=None, dl_dir=None, img_type="other", verbose=False ): + """Check if passed-in file is on S3.""" # Import packages import os @@ -903,8 +908,7 @@ def check_for_s3( # TODO: remove this once scan parameter input as dictionary is phased out if isinstance(file_path, dict): # if this is a dictionary, just skip altogether - local_path = file_path - return local_path + return file_path if file_path.lower().startswith(s3_str): file_path = s3_str + file_path[len(s3_str) :] @@ -922,42 +926,45 @@ def check_for_s3( 
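For `calc_delta_te_and_asym_ratio` above, a worked example with made-up echo times may help. Values under 10 ms are auto-scaled from seconds to milliseconds exactly as in the guard shown; the final two lines restate the computation as commonly implemented, since the subtraction itself falls outside the quoted hunk:

```python
effective_echo_spacing = 0.00048  # assumed value, in seconds
echo_times = [0.00492, 0.00738]   # assumed values, in seconds

if (echo_times[0] * 1000) < 10 and (echo_times[1] * 1000) < 10:
    echo_times = [te * 1000 for te in echo_times]  # -> [4.92, 7.38] ms

deltaTE = abs(echo_times[0] - echo_times[1])       # 2.46 ms
ees_asym_ratio = effective_echo_spacing / deltaTE
```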
os.makedirs(local_dir, exist_ok=True)

         if os.path.exists(local_path):
-            print(f"{local_path} already exists- skipping download.")
+            FMLOGGER.info("%s already exists - skipping download.", local_path)
         else:
             # Download file
             try:
                 bucket = fetch_creds.return_bucket(creds_path, bucket_name)
-                print(f"Attempting to download from AWS S3: {file_path}")
+                FMLOGGER.info("Attempting to download from AWS S3: %s", file_path)
                 bucket.download_file(Key=s3_key, Filename=local_path)
             except botocore.exceptions.ClientError as exc:
                 error_code = int(exc.response["Error"]["Code"])

                 err_msg = str(exc)
-                if error_code == 403:
+                if error_code == 403:  # noqa: PLR2004
                     err_msg = (
-                        'Access to bucket: "%s" is denied; using credentials '
-                        'in subject list: "%s"; cannot access the file "%s"'
-                        % (bucket_name, creds_path, file_path)
+                        f'Access to bucket: "{bucket_name}" is denied; using'
+                        f' credentials in subject list: "{creds_path}"; cannot access'
+                        f' the file "{file_path}"'
                     )
-                elif error_code == 404:
+                    error_type = PermissionError
+                elif error_code == 404:  # noqa: PLR2004
                     err_msg = (
-                        f"File: {os.path.join(bucket_name, s3_key)} does not exist; check spelling and try "
-                        "again"
+                        f"File: {os.path.join(bucket_name, s3_key)} does not exist;"
+                        " check spelling and try again"
                     )
+                    error_type = FileNotFoundError
                 else:
                     err_msg = (
-                        'Unable to connect to bucket: "%s". Error message:\n%s'
-                        % (bucket_name, exc)
+                        f'Unable to connect to bucket: "{bucket_name}". Error message:'
+                        f"\n{exc}"
                     )
+                    error_type = ConnectionError

-                raise Exception(err_msg)
+                raise error_type(err_msg)
             except Exception as exc:
-                err_msg = 'Unable to connect to bucket: "%s". Error message:\n%s' % (
-                    bucket_name,
-                    exc,
+                err_msg = (
+                    f'Unable to connect to bucket: "{bucket_name}". Error message:'
+                    f"\n{exc}"
                 )
-                raise Exception(err_msg)
+                raise ConnectionError(err_msg)

     # Otherwise just return what was passed in, resolving if a link
     else:
@@ -983,51 +990,48 @@ def check_for_s3(
                 ]
             )
             if local_path in ndmg_atlases["v0"]:
-                raise FileNotFoundError(
-                    "".join(
-                        [
-                            "Neuroparc atlas paths were updated on July 20, 2020. "
-                            "C-PAC configuration files using Neuroparc v0 atlas paths "
-                            "(including C-PAC default and preconfigured pipeline "
-                            "configurations from v1.6.2a and earlier) need to be "
-                            "updated to use Neuroparc atlases. Your current "
-                            "configuration includes the Neuroparc v0 path "
-                            f"{local_path} which needs to be updated to ",
-                            ndmg_atlases["v1"][ndmg_atlases["v0"].index(local_path)],
-                            ". For a full list such paths, see https://fcp-indi."
-                            "github.io/docs/nightly/user/ndmg_atlases",
-                        ]
-                    )
+                from CPAC.utils.docs import DOCS_URL_PREFIX
+
+                msg = (
+                    "Neuroparc atlas paths were updated on July 20, 2020. C-PAC"
+                    " configuration files using Neuroparc v0 atlas paths (including C-PAC"
+                    " default and preconfigured pipeline configurations from v1.6.2a and"
+                    " earlier) need to be updated to use Neuroparc atlases. Your current"
+                    f" configuration includes the Neuroparc v0 path {local_path} which"
+                    " needs to be updated to"
+                    f" {ndmg_atlases['v1'][ndmg_atlases['v0'].index(local_path)]}. For a"
+                    f" full list of such paths, see {DOCS_URL_PREFIX}/user/ndmg_atlases"
                 )
             else:
-                raise FileNotFoundError(f"File {local_path} does not exist!")
+                msg = f"File {local_path} does not exist!"
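+            # Assigning the message to `msg` before raising (rather than
+            # passing an f-string directly to the exception) follows ruff's
+            # EM102 convention applied throughout this refactor.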
+ raise FileNotFoundError(msg) if verbose: - print(f"Downloaded file:\n{local_path}\n") + FMLOGGER.info("Downloaded file:\n%s\n", local_path) # Check image dimensionality if local_path.endswith(".nii") or local_path.endswith(".nii.gz"): img_nii = nib.load(local_path) if img_type == "anat": - if len(img_nii.shape) != 3: - raise IOError( - "File: %s must be an anatomical image with 3 " - "dimensions but %d dimensions found!" - % (local_path, len(img_nii.shape)) + if len(img_nii.shape) != 3: # noqa: PLR2004 + msg = ( + f"File: {local_path} must be an anatomical image with 3 " + f"dimensions but {len(img_nii.shape)} dimensions found!" ) elif img_type == "func": if len(img_nii.shape) not in [3, 4]: - raise IOError( - "File: %s must be a functional image with 3 or " - "4 dimensions but %d dimensions found!" - % (local_path, len(img_nii.shape)) + msg = ( + f"File: {local_path} must be a functional image with 3 or " + f"4 dimensions but {len(img_nii.shape)} dimensions found!" ) + raise IOError(msg) return local_path def gather_extraction_maps(c): + """Gather the timeseries and SCA analysis configurations.""" ts_analysis_dict = {} sca_analysis_dict = {} @@ -1052,7 +1056,7 @@ def gather_extraction_maps(c): "set to run, but no ROI NIFTI file paths were " "provided!\n\n" ) - raise Exception(err) + raise RequiredFieldInvalid(err) if c.seed_based_correlation_analysis["run"]: try: @@ -1063,13 +1067,13 @@ def gather_extraction_maps(c): "is set to run, but no ROI NIFTI file paths were " "provided!\n\n" ) - raise Exception(err) + raise RequiredFieldInvalid(err) # flip the dictionary for roi_path in sca_roi_dict.keys(): # update analysis dict - for analysis_type in sca_roi_dict[roi_path].split(","): - analysis_type = analysis_type.replace(" ", "") + for _analysis_type in sca_roi_dict[roi_path].split(","): + analysis_type = _analysis_type.replace(" ", "") if analysis_type not in sca_analysis_dict.keys(): sca_analysis_dict[analysis_type] = [] @@ -1079,8 +1083,10 @@ def gather_extraction_maps(c): return (ts_analysis_dict, sca_analysis_dict) -def get_highest_local_res(template: Union[Path, str], tagname: str) -> Path: - """Given a reference template path and a resolution string, get all +def get_highest_local_res(template: Path | str, tagname: str) -> Path: + """Return the highest resolution of a template in the same local path. + + Given a reference template path and a resolution string, get all resolutions of that template in the same local path and return the highest resolution. @@ -1128,12 +1134,12 @@ def get_highest_local_res(template: Union[Path, str], tagname: str) -> Path: try: return matching_templates[0] except (FileNotFoundError, IndexError): - raise LookupError(f"Could not find template {template}") + msg = f"Could not find template {template}" + raise LookupError(msg) def res_string_to_tuple(resolution): - """ - Converts a resolution string to a tuple of floats. + """Convert a resolution string to a tuple of floats. 
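+
+    For example, given the parsing below, "3.438mmx3.438mmx3.4mm" resolves to
+    (3.438, 3.438, 3.4) and an isotropic "2mm" resolves to (2.0, 2.0, 2.0).
+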
Parameters ---------- @@ -1150,7 +1156,8 @@ def res_string_to_tuple(resolution): return (float(resolution.replace("mm", "")),) * 3 -def resolve_resolution(resolution, template, template_name, tag=None): +def resolve_resolution(orientation, resolution, template, template_name, tag=None): + """Resample a template to a given resolution.""" from nipype.interfaces import afni from CPAC.pipeline import nipype_pipeline_engine as pe @@ -1196,6 +1203,7 @@ def resolve_resolution(resolution, template, template_name, tag=None): resample.inputs.resample_mode = "Cu" resample.inputs.in_file = local_path resample.base_dir = "." + resample.inputs.orientation = orientation resampled_template = resample.run() local_path = resampled_template.outputs.out_file @@ -1204,6 +1212,7 @@ def resolve_resolution(resolution, template, template_name, tag=None): def create_anat_datasource(wf_name="anat_datasource"): + """Create a dataflow for anatomical images.""" import nipype.interfaces.utility as util from CPAC.pipeline import nipype_pipeline_engine as pe @@ -1245,12 +1254,13 @@ def create_anat_datasource(wf_name="anat_datasource"): def create_roi_mask_dataflow(masks, wf_name="datasource_roi_mask"): + """Create a dataflow for ROI masks.""" import os mask_dict = {} - for mask_file in masks: - mask_file = mask_file.rstrip("\r\n") + for _mask_file in masks: + mask_file = _mask_file.rstrip("\r\n") if mask_file.strip() == "" or mask_file.startswith("#"): continue @@ -1263,22 +1273,23 @@ def create_roi_mask_dataflow(masks, wf_name="datasource_roi_mask"): try: valid_extensions = [".nii", ".nii.gz"] - base_name = [ + base_name = next( base_file[: -len(ext)] for ext in valid_extensions if base_file.endswith(ext) - ][0] + ) for key in ["res", "space"]: base_name = bids_remove_entity(base_name, key) except IndexError: # pylint: disable=raise-missing-from - raise ValueError( + msg = ( "Error in spatial_map_dataflow: File " f'extension of {base_file} not ".nii" or ' ".nii.gz" ) + raise ValueError(msg) except Exception as e: raise e @@ -1286,10 +1297,11 @@ def create_roi_mask_dataflow(masks, wf_name="datasource_roi_mask"): base_name = format_identifier(name, desc) if base_name in mask_dict: - raise ValueError( + msg = ( "Duplicate templates/atlases not allowed: " f"{mask_file} {mask_dict[base_name]}" ) + raise ValueError(msg) mask_dict[base_name] = mask_file @@ -1336,38 +1348,40 @@ def create_roi_mask_dataflow(masks, wf_name="datasource_roi_mask"): def create_spatial_map_dataflow(spatial_maps, wf_name="datasource_maps"): + """Create a dataflow for spatial maps.""" import os wf = pe.Workflow(name=wf_name) spatial_map_dict = {} - for spatial_map_file in spatial_maps: - spatial_map_file = spatial_map_file.rstrip("\r\n") + for _spatial_map_file in spatial_maps: + spatial_map_file = _spatial_map_file.rstrip("\r\n") base_file = os.path.basename(spatial_map_file) try: valid_extensions = [".nii", ".nii.gz"] - base_name = [ + base_name = next( base_file[: -len(ext)] for ext in valid_extensions if base_file.endswith(ext) - ][0] + ) if base_name in spatial_map_dict: - raise ValueError( - "Files with same name not allowed: %s %s" - % (spatial_map_file, spatial_map_dict[base_name]) + msg = ( + f"Files with same name not allowed: {spatial_map_file}" + f" {spatial_map_dict[base_name]}" ) + raise ValueError(msg) spatial_map_dict[base_name] = spatial_map_file except IndexError: - raise Exception( - "Error in spatial_map_dataflow: " - "File extension not in .nii and .nii.gz" + msg = ( + "Error in spatial_map_dataflow: File extension not in .nii and 
.nii.gz" ) + raise ValueError(msg) inputnode = pe.Node( util.IdentityInterface( @@ -1412,6 +1426,7 @@ def create_spatial_map_dataflow(spatial_maps, wf_name="datasource_maps"): def create_grp_analysis_dataflow(wf_name="gp_dataflow"): + """Create a dataflow for group analysis.""" import nipype.interfaces.utility as util from CPAC.pipeline import nipype_pipeline_engine as pe @@ -1456,16 +1471,17 @@ def create_grp_analysis_dataflow(wf_name="gp_dataflow"): def resample_func_roi(in_func, in_roi, realignment, identity_matrix): + """Resample functional image to ROI or ROI to functional image using flirt.""" import os - import nibabel as nb + import nibabel as nib from CPAC.utils.monitoring.custom_logging import log_subprocess # load func and ROI dimension - func_img = nb.load(in_func) + func_img = nib.load(in_func) func_shape = func_img.shape - roi_img = nb.load(in_roi) + roi_img = nib.load(in_roi) roi_shape = roi_img.shape # check if func size = ROI size, return func and ROI; else resample using flirt diff --git a/CPAC/utils/test_mocks.py b/CPAC/utils/test_mocks.py index 084f299c0a..ea16c0be36 100644 --- a/CPAC/utils/test_mocks.py +++ b/CPAC/utils/test_mocks.py @@ -11,7 +11,8 @@ def file_node(path, file_node_num=0): input_node = pe.Node( - util.IdentityInterface(fields=["file"]), name=f"file_node_{file_node_num}" + util.IdentityInterface(fields=["file"]), + name=f"file_node_{file_node_num}", ) input_node.inputs.file = path return input_node, "file" @@ -34,7 +35,7 @@ def configuration_strategy_mock(method="FSL"): "functional_registration": { "EPI_registration": { "FSL-FNIRT": { - "identity_matrix": f"{fsldir}/etc/flirtsch/" "ident.mat", + "identity_matrix": f"{fsldir}/etc/flirtsch/ident.mat", "interpolation": "sinc", } }, @@ -104,11 +105,11 @@ def configuration_strategy_mock(method="FSL"): ), "anatomical_brain": os.path.join( c["pipeline_setup", "output_directory", "path"], - "anatomical_brain/" "sub-M10978008_ses-NFB3_acq-ao_brain_resample.nii.gz", + "anatomical_brain/sub-M10978008_ses-NFB3_acq-ao_brain_resample.nii.gz", ), "ants_initial_xfm": os.path.join( c["pipeline_setup", "output_directory", "path"], - "ants_initial_xfm/" "transform0DerivedInitialMovingTranslation.mat", + "ants_initial_xfm/transform0DerivedInitialMovingTranslation.mat", ), "ants_affine_xfm": os.path.join( c["pipeline_setup", "output_directory", "path"], @@ -131,7 +132,7 @@ def configuration_strategy_mock(method="FSL"): ), "ants_symm_warp_field": os.path.join( c["pipeline_setup", "output_directory", "path"], - "anatomical_to_symmetric_mni_nonlinear_xfm/" "transform3Warp.nii.gz", + "anatomical_to_symmetric_mni_nonlinear_xfm/transform3Warp.nii.gz", ), "ants_symm_affine_xfm": os.path.join( c["pipeline_setup", "output_directory", "path"], @@ -234,6 +235,7 @@ def configuration_strategy_mock(method="FSL"): resampled_template.inputs.template = template resampled_template.inputs.template_name = template_name resampled_template.inputs.tag = tag + resampled_template.inputs.orientation = "RPI" strat.update_resource_pool( {template_name: (resampled_template, "resampled_template")} From 590b43711648fb628f5eabede555b5aaa8d6157a Mon Sep 17 00:00:00 2001 From: "birajstha:construction_worker::penguin" Date: Tue, 5 Nov 2024 12:47:38 -0500 Subject: [PATCH 21/58] adding f to f-string fixing typo --- CPAC/connectome/connectivity_matrix.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CPAC/connectome/connectivity_matrix.py b/CPAC/connectome/connectivity_matrix.py index c0be9f3f27..38c0411e1b 100644 --- 
a/CPAC/connectome/connectivity_matrix.py +++ b/CPAC/connectome/connectivity_matrix.py @@ -171,7 +171,7 @@ def create_connectome_afni(name, method, pipe_num): imports=["import subprocess"], function=strip_afni_output_header, ), - name="netcorrStripHeader{method}_{pipe_num}", + name=f"netcorrStripHeader{method}_{pipe_num}", ) name_output_node = pe.Node( From 043a004380075203971ad6c205200a1555389ded Mon Sep 17 00:00:00 2001 From: "birajstha:construction_worker::penguin" Date: Thu, 7 Nov 2024 10:39:20 -0500 Subject: [PATCH 22/58] changing bold to desc-reorient_bold --- CPAC/nuisance/nuisance.py | 4 ++-- CPAC/registration/registration.py | 20 ++++++++++---------- 2 files changed, 12 insertions(+), 12 deletions(-) diff --git a/CPAC/nuisance/nuisance.py b/CPAC/nuisance/nuisance.py index e353aae03b..48a7686d7e 100644 --- a/CPAC/nuisance/nuisance.py +++ b/CPAC/nuisance/nuisance.py @@ -75,8 +75,8 @@ def choose_nuisance_blocks(cfg, rpool, generate_only=False): ] apply_transform_using = to_template_cfg["apply_transform"]["using"] input_interface = { - "default": ("desc-preproc_bold", ["desc-preproc_bold", "bold"]), - "abcd": ("desc-preproc_bold", "bold"), + "default": ("desc-preproc_bold", ["desc-preproc_bold", "desc-reorient_bold"]), + "abcd": ("desc-preproc_bold", "desc-reorient_bold"), "single_step_resampling_from_stc": ("desc-preproc_bold", "desc-stc_bold"), }.get(apply_transform_using) if input_interface is not None: diff --git a/CPAC/registration/registration.py b/CPAC/registration/registration.py index 33af078797..4848637d4d 100644 --- a/CPAC/registration/registration.py +++ b/CPAC/registration/registration.py @@ -3093,7 +3093,7 @@ def overwrite_transform_anat_to_template(wf, cfg, strat_pool, pipe_num, opt=None "input", ], option_val="Selected_Functional_Volume", - inputs=[("desc-brain_bold", ["desc-motion_bold", "bold"], "sbref")], + inputs=[("desc-brain_bold", ["desc-motion_bold", "desc-reorient_bold"], "sbref")], outputs=["sbref"], ) def coregistration_prep_vol(wf, cfg, strat_pool, pipe_num, opt=None): @@ -3115,7 +3115,7 @@ def coregistration_prep_vol(wf, cfg, strat_pool, pipe_num, opt=None): else: # TODO check which file is functional_skull_leaf # TODO add a function to choose brain or skull? 
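        # NOTE: the list passed to `strat_pool.get_data` below is an ordered
        # fallback; the first resource found in the pool is used, so the
        # motion-corrected BOLD is preferred over the reoriented raw BOLD.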
- node, out = strat_pool.get_data(["desc-motion_bold", "bold"]) + node, out = strat_pool.get_data(["desc-motion_bold", "desc-reorient_bold"]) wf.connect(node, out, get_func_volume, "in_file_a") @@ -3579,7 +3579,7 @@ def create_func_to_T1template_symmetric_xfm(wf, cfg, strat_pool, pipe_num, opt=N "sbref", "desc-preproc_bold", "desc-stc_bold", - "bold", + "desc-reorient_bold", "from-bold_to-T1w_mode-image_desc-linear_xfm", ), "despiked-fieldmap", @@ -3667,7 +3667,7 @@ def apply_phasediff_to_timeseries_separately(wf, cfg, strat_pool, pipe_num, opt= node, out = strat_pool.get_data("desc-stc_bold") out_label = "desc-stc_bold" elif opt == "abcd": - node, out = strat_pool.get_data("bold") + node, out = strat_pool.get_data("desc-reorient_bold") out_label = "bold" wf.connect(node, out, warp_bold, "in_file") @@ -3718,7 +3718,7 @@ def apply_phasediff_to_timeseries_separately(wf, cfg, strat_pool, pipe_num, opt= "sbref", "desc-preproc_bold", "desc-stc_bold", - "bold", + "desc-reorient_bold", "from-bold_to-template_mode-image_xfm", "ants-blip-warp", "fsl-blip-warp", @@ -3775,8 +3775,8 @@ def apply_blip_to_timeseries_separately(wf, cfg, strat_pool, pipe_num, opt=None) node, out = strat_pool.get_data("desc-stc_bold") out_label = "desc-stc_bold" elif opt == "abcd": - node, out = strat_pool.get_data("bold") - out_label = "bold" + node, out = strat_pool.get_data("desc-reorient_bold") + out_label = "desc-reorient_bold" wf.connect(node, out, apply_xfm, "inputspec.input_image") @@ -4421,7 +4421,7 @@ def warp_timeseries_to_T1template_abcd(wf, cfg, strat_pool, pipe_num, opt=None): option_val="dcan_nhp", inputs=[ ( - ["desc-reorient_bold", "bold"], + ["desc-reorient_bold", "desc-preproc_bold"], "coordinate-transformation", "from-T1w_to-template_mode-image_warp", "from-bold_to-T1w_mode-image_desc-linear_warp", @@ -4552,7 +4552,7 @@ def warp_timeseries_to_T1template_dcan_nhp(wf, cfg, strat_pool, pipe_num, opt=No extract_func_roi.inputs.t_min = 0 extract_func_roi.inputs.t_size = 3 - node, out = strat_pool.get_data(["desc-reorient_bold", "bold"]) + node, out = strat_pool.get_data(["desc-reorient_bold", "desc-preproc_bold"]) wf.connect(node, out, extract_func_roi, "in_file") # fslmaths "$fMRIFolder"/"$NameOffMRI"_gdc_warp -mul 0 "$fMRIFolder"/"$NameOffMRI"_gdc_warp @@ -4570,7 +4570,7 @@ def warp_timeseries_to_T1template_dcan_nhp(wf, cfg, strat_pool, pipe_num, opt=No split_func.inputs.dimension = "t" - node, out = strat_pool.get_data(["desc-reorient_bold", "bold"]) + node, out = strat_pool.get_data(["desc-reorient_bold", "desc-preproc_bold"]) wf.connect(node, out, split_func, "in_file") ### Loop starts! ### From c725a9db37c7bc29c53ce8ba3224f890a3f719cb Mon Sep 17 00:00:00 2001 From: Jon Cluce Date: Mon, 11 Nov 2024 14:36:41 -0500 Subject: [PATCH 23/58] :bug: :alien: Patch `NetCorr._list_outputs` --- CPAC/utils/interfaces/netcorr.py | 55 ++++++++++++++++++++++++++++++++ 1 file changed, 55 insertions(+) diff --git a/CPAC/utils/interfaces/netcorr.py b/CPAC/utils/interfaces/netcorr.py index aee9a4d13d..728afdeef1 100644 --- a/CPAC/utils/interfaces/netcorr.py +++ b/CPAC/utils/interfaces/netcorr.py @@ -19,6 +19,61 @@ class NetCorr(NipypeNetCorr): # noqa: D101 input_spec = NetCorrInputSpec + def _list_outputs(self): + """``nipype.interfaces.afni.preprocess.NetCorr._list_outputs`` with a bugfix. + + Notes + ----- + This method can be removed once nipy/nipype#3697 is merged and a release + including that PR is included in the C-PAC image. 
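+
+        Until then, this override locates ``out_corr_matrix`` (the ``*.netcc``
+        file) and, when whole-brain correlation maps are requested,
+        ``out_corr_maps`` in the directory of the output prefix, as
+        implemented below.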
+ """ + # STATEMENT OF CHANGES: + # This function is derived from sources licensed under the Apache-2.0 terms, + # and this function has been changed. + + # CHANGES: + # * Includes changes from https://github.com/nipy/nipype/pull/3697 prior to all commits between https://github.com/nipy/nipype/tree/1.8.6 and that PR being perged. + + # ORIGINAL WORK'S ATTRIBUTION NOTICE: + # Copyright (c) 2009-2016, Nipype developers + + # Licensed under the Apache License, Version 2.0 (the "License"); + # you may not use this file except in compliance with the License. + # You may obtain a copy of the License at + + # http://www.apache.org/licenses/LICENSE-2.0 + + # Unless required by applicable law or agreed to in writing, software + # distributed under the License is distributed on an "AS IS" BASIS, + # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + # See the License for the specific language governing permissions and + # limitations under the License. + + # Prior to release 0.12, Nipype was licensed under a BSD license. + + # Modifications copyright (C) 2024 C-PAC Developers + import glob + import os + + from nipype.interfaces.base.traits_extension import isdefined + + outputs = self.output_spec().get() + + if not isdefined(self.inputs.out_file): + prefix = self._gen_fname(self.inputs.in_file, suffix="_netcorr") + else: + prefix = self.inputs.out_file + + # All outputs should be in the same directory as the prefix + odir = os.path.dirname(os.path.abspath(prefix)) + outputs["out_corr_matrix"] = glob.glob(os.path.join(odir, "*.netcc"))[0] + + if self.inputs.ts_wb_corr or self.inputs.ts_wb_Z: + corrdir = os.path.join(odir, prefix + "_000_INDIV") + outputs["out_corr_maps"] = glob.glob(os.path.join(corrdir, "*.nii.gz")) + + return outputs + NetCorr.__doc__ = f"""{NipypeNetCorr.__doc__} `CPAC.utils.interfaces.netcorr.NetCorr` adds an additional optional input, `automask_off` From f1dac0c6d85c90f0dfe8aa9702a5764dabcdcc4c Mon Sep 17 00:00:00 2001 From: Jon Cluce Date: Mon, 18 Nov 2024 09:35:39 -0500 Subject: [PATCH 24/58] :pencil2: Fix "perged" typo in comment --- CPAC/utils/interfaces/netcorr.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CPAC/utils/interfaces/netcorr.py b/CPAC/utils/interfaces/netcorr.py index 728afdeef1..6af44a15ab 100644 --- a/CPAC/utils/interfaces/netcorr.py +++ b/CPAC/utils/interfaces/netcorr.py @@ -32,7 +32,7 @@ def _list_outputs(self): # and this function has been changed. # CHANGES: - # * Includes changes from https://github.com/nipy/nipype/pull/3697 prior to all commits between https://github.com/nipy/nipype/tree/1.8.6 and that PR being perged. + # * Includes changes from https://github.com/nipy/nipype/pull/3697 prior to all commits between https://github.com/nipy/nipype/tree/1.8.6 and that PR being merged and released. 
# ORIGINAL WORK'S ATTRIBUTION NOTICE: # Copyright (c) 2009-2016, Nipype developers From be61d7f392376ed41980287d3e89706d20f28caf Mon Sep 17 00:00:00 2001 From: Jon Cluce Date: Mon, 6 Jan 2025 10:46:21 -0500 Subject: [PATCH 25/58] :sparkles: Add inventory utility --- CPAC/pipeline/engine.py | 16 +- CPAC/pipeline/resource_inventory.py | 300 ++++++++++++++++++++++++++++ CPAC/utils/outputs.py | 26 ++- setup.py | 9 +- 4 files changed, 340 insertions(+), 11 deletions(-) create mode 100755 CPAC/pipeline/resource_inventory.py diff --git a/CPAC/pipeline/engine.py b/CPAC/pipeline/engine.py index be1d0c0c17..7494ae92ee 100644 --- a/CPAC/pipeline/engine.py +++ b/CPAC/pipeline/engine.py @@ -1,4 +1,4 @@ -# Copyright (C) 2021-2024 C-PAC Developers +# Copyright (C) 2021-2025 C-PAC Developers # This file is part of C-PAC. @@ -17,6 +17,7 @@ import ast import copy import hashlib +from importlib.resources import files from itertools import chain import json import os @@ -24,6 +25,7 @@ from typing import Optional import warnings +import pandas as pd from nipype import config, logging from nipype.interfaces import afni from nipype.interfaces.utility import Rename @@ -2408,15 +2410,17 @@ def strip_template(data_label, dir_path, filename): return data_label, json +def template_dataframe() -> pd.DataFrame: + """Return the template dataframe.""" + template_csv = files("CPAC").joinpath("resources/cpac_templates.csv") + return pd.read_csv(str(template_csv), keep_default_na=False) + + def ingress_pipeconfig_paths(wf, cfg, rpool, unique_id, creds_path=None): # ingress config file paths # TODO: may want to change the resource keys for each to include one level up in the YAML as well - import pandas as pd - import pkg_resources as p - - template_csv = p.resource_filename("CPAC", "resources/cpac_templates.csv") - template_df = pd.read_csv(template_csv, keep_default_na=False) + template_df = template_dataframe() desired_orientation = cfg.pipeline_setup["desired_orientation"] for row in template_df.itertuples(): diff --git a/CPAC/pipeline/resource_inventory.py b/CPAC/pipeline/resource_inventory.py new file mode 100755 index 0000000000..fc0abc2bfa --- /dev/null +++ b/CPAC/pipeline/resource_inventory.py @@ -0,0 +1,300 @@ +#!/usr/bin/env python +# Copyright (C) 2025 C-PAC Developers + +# This file is part of C-PAC. + +# C-PAC is free software: you can redistribute it and/or modify it under +# the terms of the GNU Lesser General Public License as published by the +# Free Software Foundation, either version 3 of the License, or (at your +# option) any later version. + +# C-PAC is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public +# License for more details. + +# You should have received a copy of the GNU Lesser General Public +# License along with C-PAC. If not, see . 
+"""Inspect inputs and outputs for NodeBlockFunctions."""
+
+import ast
+from dataclasses import dataclass, field
+import importlib
+from importlib.resources import files
+import inspect
+from itertools import chain
+import os
+from typing import Any, cast, Iterable
+
+import yaml
+
+from CPAC.pipeline.engine import template_dataframe
+from CPAC.pipeline.nodeblock import NodeBlockFunction
+from CPAC.utils.monitoring import UTLOGGER
+from CPAC.utils.outputs import Outputs
+
+
+def import_nodeblock_functions(package_name: str) -> list[NodeBlockFunction]:
+    """
+    Import all functions with the @nodeblock decorator from all modules and submodules in a package.
+
+    Parameters
+    ----------
+    package_name
+        The name of the package to import from.
+    """
+    functions: list[NodeBlockFunction] = []
+    package = importlib.import_module(package_name)
+    package_path = package.__path__[0]  # Path to the package directory
+
+    for root, _, package_files in os.walk(package_path):
+        for file in package_files:
+            if file.endswith(".py") and file != "__init__.py":
+                # Get the module path
+                rel_path = os.path.relpath(os.path.join(root, file), package_path)
+                module_name = f"{package_name}.{rel_path[:-3].replace(os.sep, '.')}"
+
+                # Import the module
+                try:
+                    module = importlib.import_module(module_name)
+                except ImportError as e:
+                    UTLOGGER.debug(f"Failed to import {module_name}: {e}")
+                    continue
+                # Extract nodeblock-decorated functions from the module
+                for _name, obj in inspect.getmembers(
+                    module, predicate=lambda obj: isinstance(obj, NodeBlockFunction)
+                ):
+                    functions.append(obj)
+
+    return functions
+
+
+@dataclass
+class ResourceSourceList:
+    """A list of resource sources without duplicates."""
+
+    sources: list[str] = field(default_factory=list)
+
+    def __add__(self, other: str | list[str]) -> list[str]:
+        """Add a list of sources to the list."""
+        if isinstance(other, str):
+            other = [other]
+        new_set = {*self.sources, *other}
+        return sorted(new_set, key=str.casefold)
+
+    def __contains__(self, item: str) -> bool:
+        """Check if a source is in the list."""
+        return item in self.sources
+
+    def __delitem__(self, key: int) -> None:
+        """Delete a source by index."""
+        del self.sources[key]
+
+    def __eq__(self, value: Any) -> bool:
+        """Check if the lists of sources are the same."""
+        return set(self) == set(value)
+
+    def __getitem__(self, item: int) -> str:
+        """Get a source by index."""
+        return self.sources[item]
+
+    def __hash__(self) -> int:
+        """Get the hash of the list of sources."""
+        return hash(self.sources)
+
+    def __iadd__(self, other: str | list[str]) -> "ResourceSourceList":
+        """Add a list of sources to the list."""
+        self.sources = self + other
+        return self
+
+    def __iter__(self):
+        """Iterate over the sources."""
+        return iter(self.sources)
+
+    def __len__(self) -> int:
+        """Get the number of sources."""
+        return len(self.sources)
+
+    def __repr__(self) -> str:
+        """Get the reproducible string representation of the sources."""
+        return f"ResourceSourceList({(self.sources)})"
+
+    def __reversed__(self) -> list[str]:
+        """Get the sources reversed."""
+        return list(reversed(self.sources))
+
+    def __setitem__(self, key: int, value: str) -> None:
+        """Set a source by index."""
+        self.sources[key] = value
+
+    def __sorted__(self) -> list[str]:
+        """Get the sources sorted."""
+        return sorted(self.sources, key=str.casefold)
+
+    def __str__(self) -> str:
+        """Get the string representation of the sources."""
+        return str(self.sources)
+
+
+@dataclass
+class ResourceIO:
+    """NodeBlockFunctions that 
use a resource for IO.""" + + name: str + """The name of the resource.""" + output_from: ResourceSourceList | list[str] = field( + default_factory=ResourceSourceList + ) + """The functions that output the resource.""" + output_to: ResourceSourceList | list[str] = field( + default_factory=ResourceSourceList + ) + """The subdirectory the resource is output to.""" + input_for: ResourceSourceList | list[str] = field( + default_factory=ResourceSourceList + ) + """The functions that use the resource as input.""" + + def __post_init__(self) -> None: + """Handle optionals.""" + if isinstance(self.output_from, list): + self.output_from = ResourceSourceList(self.output_from) + if isinstance(self.output_to, list): + self.output_to = ResourceSourceList(self.output_to) + if isinstance(self.input_for, list): + self.input_for = ResourceSourceList(self.input_for) + + def __str__(self) -> str: + """Return string representation for ResourceIO instance.""" + return f"{{{self.name}: {{'input_for': {self.input_for!s}, 'output_from': {self.output_from!s}}}}})" + + def as_dict(self) -> dict[str, list[str]]: + """Return the ResourceIO as a built-in dictionary type.""" + return { + k: v + for k, v in { + "input_for": [str(source) for source in self.input_for], + "output_from": [str(source) for source in self.output_from], + "output_to": [str(source) for source in self.output_to], + }.items() + if v + } + + +def _flatten_io(io: list[Iterable]) -> list[str]: + """Given a list of strings or iterables thereof, flatten the list to all strings.""" + if all(isinstance(resource, str) for resource in io): + return cast(list[str], io) + while not all(isinstance(resource, str) for resource in io): + io = list( + chain.from_iterable( + [ + resource if not isinstance(resource, str) else [resource] + for resource in io + ] + ) + ) + return cast(list[str], io) + + +def find_directly_set_resources(package_name: str) -> dict[str, list[str]]: + """Find all resources set explicitly via :pyy:method:`~CPAC.pipeline.engine.ResourcePool.set_data`. + + Parameters + ---------- + package_name + The name of the package to search for resources. + + Returns + ------- + dict + A dictionary containing the name of the resource and the name of the functions that set it. 
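+
+    For example, a returned entry might (hypothetically) look like
+    ``{"desc-preproc_bold": ["func_ingress"]}``: a resource name mapped to the
+    names of the functions that pass it to ``set_data``.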
+ """ + resources: dict[str[list[str]]] = {} + for dirpath, _, filenames in os.walk(str(files(package_name))): + for filename in filenames: + if filename.endswith(".py"): + filepath = os.path.join(dirpath, filename) + with open(filepath, "r", encoding="utf-8") as file: + tree = ast.parse(file.read(), filename=filepath) + for node in ast.walk(tree): + if isinstance(node, ast.Call) and isinstance( + node.func, ast.Attribute + ): + if node.func.attr == "set_data": + try: + resource: str = ast.literal_eval(node.args[0]) + if resource not in resources: + resources[resource] = [] + resources[resource].append( + ast.literal_eval(node.args[-1]) + ) + except ValueError: + # The resource name or function name is not a literal, so this `set_data` is a dynamic call + pass + return resources + + +def resource_inventory(package: str = "CPAC") -> dict[str, ResourceIO]: + """Gather all inputs and outputs for a list of NodeBlockFunctions.""" + resources: dict[str, ResourceIO] = {} + # Node block function inputs and outputs + for nbf in import_nodeblock_functions(package): + nbf_name = f"{nbf.__module__}.{nbf.__qualname__}" + if hasattr(nbf, "inputs"): + for nbf_input in _flatten_io(cast(list[Iterable], nbf.inputs)): + if nbf_input: + if nbf_input not in resources: + resources[nbf_input] = ResourceIO( + nbf_input, input_for=[nbf_name] + ) + else: + resources[nbf_input].input_for += nbf_name + if hasattr(nbf, "outputs"): + for nbf_output in _flatten_io(cast(list[Iterable], nbf.outputs)): + if nbf_output: + if nbf_output not in resources: + resources[nbf_output] = ResourceIO( + nbf_output, output_from=[nbf_name] + ) + else: + resources[nbf_output].output_from += nbf_name + # Template resources set from pipeline config + templates_from_config_df = template_dataframe() + for _, row in templates_from_config_df.iterrows(): + output_from = f"pipeline configuration: {row.Pipeline_Config_Entry}" + if row.Key not in resources: + resources[row.Key] = ResourceIO(row.Key, output_from=[output_from]) + else: + resources[row.Key].output_from += output_from + # Hard-coded resources + for resource, functions in find_directly_set_resources(package).items(): + if resource not in resources: + resources[resource] = ResourceIO(resource, output_from=functions) + else: + resources[resource].output_from += functions + # Outputs + for _, row in Outputs.reference.iterrows(): + if row.Resource not in resources: + resources[row.Resource] = ResourceIO( + row.Resource, output_to=[row["Sub-Directory"]] + ) + else: + resources[row.Resource].output_to += row["Sub-Directory"] + return dict(sorted(resources.items(), key=lambda item: item[0].casefold())) + + +def dump_inventory_to_yaml(inventory: dict[str, ResourceIO]) -> str: + """Dump NodeBlock Interfaces to a YAML string.""" + return yaml.dump( + {key: value.as_dict() for key, value in inventory.items()}, sort_keys=False + ) + + +def main() -> None: + """Print the NodeBlock IO to the console.""" + UTLOGGER.info(dump_inventory_to_yaml(resource_inventory("CPAC"))) # noqa: T201 + + +if __name__ == "__main__": + main() diff --git a/CPAC/utils/outputs.py b/CPAC/utils/outputs.py index 11b81eb60f..451d893987 100644 --- a/CPAC/utils/outputs.py +++ b/CPAC/utils/outputs.py @@ -1,10 +1,30 @@ +# Copyright (C) 2018-2025 C-PAC Developers + +# This file is part of C-PAC. 
+
+# C-PAC is free software: you can redistribute it and/or modify it under
+# the terms of the GNU Lesser General Public License as published by the
+# Free Software Foundation, either version 3 of the License, or (at your
+# option) any later version.
+
+# C-PAC is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
+# License for more details.
+
+# You should have received a copy of the GNU Lesser General Public
+# License along with C-PAC. If not, see <https://www.gnu.org/licenses/>.
+"""Specify the resources that C-PAC writes to the output directory."""
+
+from importlib.resources import files
+
 import pandas as pd
-import pkg_resources as p


 class Outputs:
-    # Settle some things about the resource pool reference and the output directory
-    reference_csv = p.resource_filename("CPAC", "resources/cpac_outputs.tsv")
+    """Settle some things about the resource pool reference and the output directory."""
+
+    reference_csv = str(files("CPAC").joinpath("resources/cpac_outputs.tsv"))

     try:
         reference = pd.read_csv(reference_csv, delimiter="\t", keep_default_na=False)
diff --git a/setup.py b/setup.py
index 17919395d2..bb20b66c6c 100755
--- a/setup.py
+++ b/setup.py
@@ -1,4 +1,4 @@
-# Copyright (C) 2022-2024 C-PAC Developers
+# Copyright (C) 2022-2025 C-PAC Developers

 # This file is part of C-PAC.
@@ -84,7 +84,12 @@ def main(**extra_args):
         extras_require={"graphviz": ["pygraphviz"]},
         configuration=configuration,
         scripts=glob("scripts/*"),
-        entry_points={"console_scripts": ["cpac = CPAC.__main__:main"]},
+        entry_points={
+            "console_scripts": [
+                "cpac = CPAC.__main__:main",
+                "C-PAC_nb_io = CPAC.pipeline.nb_io:main",
+            ]
+        },
         package_data={
             "CPAC": [
                 "test_data/*",

From 5758eedad189fed6bde65a895d1a100095fcb296 Mon Sep 17 00:00:00 2001
From: Jon Cluce
Date: Mon, 6 Jan 2025 10:48:00 -0500
Subject: [PATCH 26/58] =?UTF-8?q?:bug:=20Fix=20resource=20name:=20`unet=5F?=
 =?UTF-8?q?model`=20=E2=86=92=20`unet-model`?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 CPAC/anat_preproc/anat_preproc.py | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/CPAC/anat_preproc/anat_preproc.py b/CPAC/anat_preproc/anat_preproc.py
index a561f8e077..f4bd6f7049 100644
--- a/CPAC/anat_preproc/anat_preproc.py
+++ b/CPAC/anat_preproc/anat_preproc.py
@@ -1,5 +1,5 @@
 # -*- coding: utf-8 -*-
-# Copyright (C) 2012-2023 C-PAC Developers
+# Copyright (C) 2012-2025 C-PAC Developers

 # This file is part of C-PAC. 
@@ -2572,7 +2572,7 @@ def brain_mask_acpc_niworkflows_ants_T2(wf, cfg, strat_pool, pipe_num, opt=None) config=["anatomical_preproc", "brain_extraction"], option_key="using", option_val="UNet", - inputs=["desc-preproc_T2w", "T1w-brain-template", "T1w-template", "unet_model"], + inputs=["desc-preproc_T2w", "T1w-brain-template", "T1w-template", "unet-model"], outputs=["space-T2w_desc-brain_mask"], ) def brain_mask_unet_T2(wf, cfg, strat_pool, pipe_num, opt=None): @@ -2586,7 +2586,7 @@ def brain_mask_unet_T2(wf, cfg, strat_pool, pipe_num, opt=None): config=["anatomical_preproc", "brain_extraction"], option_key="using", option_val="UNet", - inputs=["desc-preproc_T2w", "T1w-brain-template", "T1w-template", "unet_model"], + inputs=["desc-preproc_T2w", "T1w-brain-template", "T1w-template", "unet-model"], outputs=["space-T2w_desc-acpcbrain_mask"], ) def brain_mask_acpc_unet_T2(wf, cfg, strat_pool, pipe_num, opt=None): From 236e0febc0af3a34f26630260042793cc5240598 Mon Sep 17 00:00:00 2001 From: Jon Cluce Date: Mon, 6 Jan 2025 11:07:53 -0500 Subject: [PATCH 27/58] :children_crossing: Add CLI for resource inventory --- CPAC/pipeline/resource_inventory.py | 24 ++++++++++++++++++++++-- 1 file changed, 22 insertions(+), 2 deletions(-) diff --git a/CPAC/pipeline/resource_inventory.py b/CPAC/pipeline/resource_inventory.py index fc0abc2bfa..28c9937cb5 100755 --- a/CPAC/pipeline/resource_inventory.py +++ b/CPAC/pipeline/resource_inventory.py @@ -17,6 +17,7 @@ # License along with C-PAC. If not, see . """Inspect inputs and outputs for NodeBlockFunctions.""" +from argparse import ArgumentParser, Namespace import ast from dataclasses import dataclass, field import importlib @@ -24,6 +25,7 @@ import inspect from itertools import chain import os +from pathlib import Path from typing import Any, cast, Iterable import yaml @@ -181,6 +183,22 @@ def as_dict(self) -> dict[str, list[str]]: } +def cli_parser() -> Namespace: + """Parse command line argument.""" + parser = ArgumentParser( + description="Inventory resources for C-PAC NodeBlockFunctions." 
+ ) + parser.add_argument( + "-o", + "--output", + nargs="?", + help="The output file to write the inventory to.", + type=Path, + default=Path("resource_inventory.yaml"), + ) + return parser.parse_args() + + def _flatten_io(io: list[Iterable]) -> list[str]: """Given a list of strings or iterables thereof, flatten the list to all strings.""" if all(isinstance(resource, str) for resource in io): @@ -292,8 +310,10 @@ def dump_inventory_to_yaml(inventory: dict[str, ResourceIO]) -> str: def main() -> None: - """Print the NodeBlock IO to the console.""" - UTLOGGER.info(dump_inventory_to_yaml(resource_inventory("CPAC"))) # noqa: T201 + """Save the NodeBlock inventory to a file.""" + args = cli_parser() + with args.output.open("w") as file: + file.write(dump_inventory_to_yaml(resource_inventory("CPAC"))) if __name__ == "__main__": From 36528bdb81243550480cc1ab9f88887d36123d8e Mon Sep 17 00:00:00 2001 From: Jon Cluce Date: Mon, 6 Jan 2025 11:19:45 -0500 Subject: [PATCH 28/58] :children_crossing: Rename `resource_inventory` CLI command --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index bb20b66c6c..f22a744e2d 100755 --- a/setup.py +++ b/setup.py @@ -87,7 +87,7 @@ def main(**extra_args): entry_points={ "console_scripts": [ "cpac = CPAC.__main__:main", - "C-PAC_nb_io = CPAC.pipeline.nb_io:main", + "resource_inventory = CPAC.pipeline.resource_inventory:main", ] }, package_data={ From 20ce4289028378c48c35a86987b1dd42a89fa1be Mon Sep 17 00:00:00 2001 From: Jon Cluce Date: Tue, 7 Jan 2025 14:14:59 -0500 Subject: [PATCH 29/58] :children_crossing: Specify default in helpstring --- CPAC/pipeline/resource_inventory.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/CPAC/pipeline/resource_inventory.py b/CPAC/pipeline/resource_inventory.py index 28c9937cb5..cee3570dc1 100755 --- a/CPAC/pipeline/resource_inventory.py +++ b/CPAC/pipeline/resource_inventory.py @@ -17,7 +17,7 @@ # License along with C-PAC. If not, see . """Inspect inputs and outputs for NodeBlockFunctions.""" -from argparse import ArgumentParser, Namespace +from argparse import ArgumentDefaultsHelpFormatter, ArgumentParser, Namespace import ast from dataclasses import dataclass, field import importlib @@ -186,7 +186,8 @@ def as_dict(self) -> dict[str, list[str]]: def cli_parser() -> Namespace: """Parse command line argument.""" parser = ArgumentParser( - description="Inventory resources for C-PAC NodeBlockFunctions." 
+        description="Inventory resources for C-PAC NodeBlockFunctions.",
+        formatter_class=ArgumentDefaultsHelpFormatter,
     )
     parser.add_argument(
         "-o",

From 780600a2b4b0544c9fc8419c7a2be49b1a46fc2d Mon Sep 17 00:00:00 2001
From: Jon Cluce
Date: Thu, 9 Jan 2025 10:44:32 -0500
Subject: [PATCH 30/58] :zap: Don't install torch just to look for
 NodeBlockFunctions

---
 CPAC/pipeline/resource_inventory.py | 31 ++++++++++++++++++++++------
 1 file changed, 26 insertions(+), 5 deletions(-)

diff --git a/CPAC/pipeline/resource_inventory.py b/CPAC/pipeline/resource_inventory.py
index cee3570dc1..85090f533b 100755
--- a/CPAC/pipeline/resource_inventory.py
+++ b/CPAC/pipeline/resource_inventory.py
@@ -26,8 +26,10 @@ from itertools import chain
 import os
 from pathlib import Path
-from typing import Any, cast, Iterable
+from typing import Any, cast, Iterable, Optional
+from unittest.mock import patch
+
+from traits.trait_errors import TraitError
 import yaml

 from CPAC.pipeline.engine import template_dataframe
@@ -36,7 +38,9 @@ from CPAC.utils.outputs import Outputs


-def import_nodeblock_functions(package_name: str) -> list[NodeBlockFunction]:
+def import_nodeblock_functions(
+    package_name: str, exclude: Optional[list[str]] = None
+) -> list[NodeBlockFunction]:
     """
     Import all functions with the @nodeblock decorator from all modules and submodules in a package.

@@ -44,7 +48,12 @@ def import_nodeblock_functions(package_name: str) -> list[NodeBlockFunction]:
     ----------
     package_name
         The name of the package to import from.
+
+    exclude
+        A list of module names to exclude from the import.
     """
+    if exclude is None:
+        exclude = []
     functions: list[NodeBlockFunction] = []
     package = importlib.import_module(package_name)
     package_path = package.__path__[0]  # Path to the package directory
@@ -55,11 +64,16 @@ def import_nodeblock_functions(package_name: str) -> list[NodeBlockFunction]:
                 # Get the module path
                 rel_path = os.path.relpath(os.path.join(root, file), package_path)
                 module_name = f"{package_name}.{rel_path[:-3].replace(os.sep, '.')}"
+                if module_name in exclude:
+                    continue

                 # Import the module
                 try:
-                    module = importlib.import_module(module_name)
-                except ImportError as e:
+                    with patch.dict(
+                        "sys.modules", {exclusion: None for exclusion in exclude}
+                    ):
+                        module = importlib.import_module(module_name)
+                except (ImportError, TraitError, ValueError) as e:
                     UTLOGGER.debug(f"Failed to import {module_name}: {e}")
                     continue
                 # Extract nodeblock-decorated functions from the module
@@ -258,7 +272,14 @@ def resource_inventory(package: str = "CPAC") -> dict[str, ResourceIO]:
     """Gather all inputs and outputs for a list of NodeBlockFunctions."""
     resources: dict[str, ResourceIO] = {}
     # Node block function inputs and outputs
-    for nbf in import_nodeblock_functions(package):
+    for nbf in import_nodeblock_functions(
+        package,
+        [
+            # No nodeblock functions in these modules that dynamically install torch
+            "CPAC.unet.__init__",
+            "CPAC.unet._torch",
+        ],
+    ):
         nbf_name = f"{nbf.__module__}.{nbf.__qualname__}"
         if hasattr(nbf, "inputs"):
             for nbf_input in _flatten_io(cast(list[Iterable], nbf.inputs)):

From 0f02aeeda9a83dc352de5e3eebdb578a5ac9b53b Mon Sep 17 00:00:00 2001
From: Jon Cluce
Date: Thu, 9 Jan 2025 10:58:13 -0500
Subject: [PATCH 31/58] :memo: Add `resource_inventory` to CHANGELOG [skip ci]

---
 CHANGELOG.md | 1 +
 1 file changed, 1 insertion(+)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index be5ec4a432..9de89dc58e 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -24,6 +24,7 @@ and this project adheres to [Semantic 
Versioning](https://semver.org/spec/v2.0.0 - Required positional parameter "wf" in input and output of `ingress_pipeconfig_paths` function, where a node to reorient templates is added to the `wf`. - Required positional parameter "orientation" to `resolve_resolution`. - Optional positional argument "cfg" to `create_lesion_preproc`. +- `resource_inventory` utility to inventory NodeBlock function inputs and outputs. ### Changed From 38cdabe5844d4d32ec825e82a828f3db70b733f3 Mon Sep 17 00:00:00 2001 From: Jon Cluce Date: Wed, 15 Jan 2025 15:24:07 -0500 Subject: [PATCH 32/58] :sparkles: Pick up more hard-coded resources --- CPAC/pipeline/resource_inventory.py | 208 +++++++++++++++++++++++++--- 1 file changed, 188 insertions(+), 20 deletions(-) diff --git a/CPAC/pipeline/resource_inventory.py b/CPAC/pipeline/resource_inventory.py index 85090f533b..dfce0d1c2d 100755 --- a/CPAC/pipeline/resource_inventory.py +++ b/CPAC/pipeline/resource_inventory.py @@ -23,7 +23,7 @@ import importlib from importlib.resources import files import inspect -from itertools import chain +from itertools import chain, product import os from pathlib import Path from typing import Any, cast, Iterable, Optional @@ -91,9 +91,11 @@ class ResourceSourceList: sources: list[str] = field(default_factory=list) - def __add__(self, other: str | list[str]) -> list[str]: + def __add__(self, other: "str | list[str] | ResourceSourceList") -> list[str]: """Add a list of sources to the list.""" if isinstance(other, str): + if not other: + other = "(dummy node)" other = [other] new_set = {*self.sources, *other} return sorted(new_set, key=str.casefold) @@ -118,7 +120,9 @@ def __hash__(self) -> int: """Get the hash of the list of sources.""" return hash(self.sources) - def __iadd__(self, other: str | list[str]) -> "ResourceSourceList": + def __iadd__( + self, other: "str | list[str] | ResourceSourceList" + ) -> "ResourceSourceList": """Add a list of sources to the list.""" self.sources = self + other return self @@ -230,7 +234,168 @@ def _flatten_io(io: list[Iterable]) -> list[str]: return cast(list[str], io) -def find_directly_set_resources(package_name: str) -> dict[str, list[str]]: +class MultipleContext(list): + """Subclass of list to store multilpe contexts.""" + + +class DirectlySetResources(ast.NodeVisitor): + """Class to track resources set directly, rather than through NodeBlocks.""" + + def __init__(self) -> None: + """Initialize the visitor.""" + super().__init__() + self._context: dict[str, Any] = {} + self.dynamic_resources: dict[str, ResourceSourceList] = {} + self._history: dict[str, list[Any]] = {} + self.resources: dict[str, ResourceSourceList] = {} + + def assign_resource(self, resource: str, value: str) -> None: + """Assign a value to a resource.""" + target = self.dynamic_resources if r".*" in value else self.resources + resource = str(resource) + if resource not in target: + target[resource] = ResourceSourceList() + target[resource] += value + + @property + def context(self) -> dict[str, Any]: + """Return the context.""" + return self._context + + @context.setter + def context(self, value: tuple[Iterable, Any]) -> None: + """Set the context.""" + key, _value = value + if not isinstance(key, str): + for subkey in key: + self.context = subkey, _value + else: + self._context[key] = _value + if key not in self._history: + self._history[key] = [] + self._history[key].append(_value) + + def lookup_context(self, variable: str) -> str | MultipleContext: + """Plug in variable.""" + + def lookup() -> str | list[str]: + """Look up 
context.""" + if variable in self.context: + if self.context[variable] == variable: + history = list(self._history[variable]) + while history and history[-1] == variable: + history.pop() + if history: + return history[-1] + return self.context[variable] + return ".*" + + context = lookup() + if isinstance(context, list): + context = MultipleContext(context) + return context + + @staticmethod + def handle_multiple_contexts(contexts: list[str | list[str]]) -> list[str]: + """Parse multiple contexts.""" + if isinstance(contexts, list): + return MultipleContext( + [ + "".join(list(ctx)) + for ctx in product( + *[ + context if isinstance(context, list) else [context] + for context in contexts + ] + ) + ] + ) + return contexts + + def parse_ast(self, node: Any) -> Any: + """Parse AST.""" + if not isinstance(node, ast.AST): + if isinstance(node, str): + return node + if not isinstance(node, Iterable): + return str(node) + if isinstance(node, ast.Dict): + return { + self.parse_ast(key): self.parse_ast(value) + for key, value in dict(zip(node.keys, node.values)).items() + } + if isinstance(node, (MultipleContext, list, set, tuple)): + return type(node)(self.parse_ast(subnode) for subnode in node) + if isinstance(node, ast.FormattedValue): + if hasattr(node, "value") and hasattr(node.value, "id"): + return self.lookup_context(getattr(node.value, "id")) + if isinstance(node, ast.JoinedStr): + node_values = [self.parse_ast(value) for value in node.values] + if any(isinstance(value, MultipleContext) for value in node_values): + return self.handle_multiple_contexts(node_values) + return "".join(str(item) for item in node_values) + if isinstance(node, ast.Dict): + return { + self.parse_ast(key): self.parse_ast(value) + for key, value in dict(zip(node.keys, node.values)).items() + } + for attr in ["values", "elts"]: + if hasattr(node, attr): + return [self.parse_ast(subnode) for subnode in getattr(node, attr)] + for attr in ["value", "id"]: + if hasattr(node, attr): + return self.parse_ast(getattr(node, attr)) + return r".*" # wildcard for regex matching + + def visit_Assign(self, node: ast.Assign) -> None: + """Visit an assignment.""" + value = self.parse_ast(node.value) + for target in node.targets: + resource = self.parse_ast(target) + self.context = resource, value + # self.assign_resource(str(self.parse_ast(target)), value) + self.generic_visit(node) + + def visit_Call(self, node: ast.Call) -> None: + """Visit a function call.""" + if isinstance(node.func, ast.Attribute) and node.func.attr == "set_data": + value = self.parse_ast(node.args[5]) + if hasattr(node.args[0], "value"): + resource: str = getattr(node.args[0], "value") + elif hasattr(node.args[0], "id"): + resource = self.lookup_context(getattr(node.args[0], "id")) + if isinstance(resource, MultipleContext): + for resource_context in resource: + self.assign_resource(resource_context, value) + self.generic_visit(node) + return + elif isinstance(node.args[0], ast.JoinedStr): + resource = self.parse_ast(node.args[0]) + else: + self.generic_visit(node) + return + self.assign_resource(resource, value) + self.generic_visit(node) + + def visit_For(self, node: ast.For) -> None: + """Vist for loop.""" + # This is probably too specific, + # will need to be updated if we add more out-of-nodeblock settings. 
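+        # The assignments below bind the loop variable to the loop's iterable
+        # in the tracked context, so resource names built from the loop
+        # variable can be resolved later instead of widening to the ".*"
+        # wildcard.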
+ target = self.parse_ast(node.target) + if ( + hasattr(node.iter, "func") + and hasattr(node.iter.func, "value") + and hasattr(node.iter.func.value, "id") + ): + context = self.context.get(self.parse_ast(node.iter.func.value.id), ".*") + if isinstance(target, list) and isinstance(context, dict): + self.context = target[0], list(context.keys()) + else: + self.context = target, self.parse_ast(node.iter) + self.generic_visit(node) + + +def find_directly_set_resources(package_name: str) -> dict[str, ResourceSourceList]: """Find all resources set explicitly via :pyy:method:`~CPAC.pipeline.engine.ResourcePool.set_data`. Parameters @@ -243,28 +408,31 @@ def find_directly_set_resources(package_name: str) -> dict[str, list[str]]: dict A dictionary containing the name of the resource and the name of the functions that set it. """ - resources: dict[str[list[str]]] = {} + resources: dict[str, ResourceSourceList] = {} + dynamic_resources: dict[str, ResourceSourceList] = {} for dirpath, _, filenames in os.walk(str(files(package_name))): for filename in filenames: if filename.endswith(".py"): filepath = os.path.join(dirpath, filename) with open(filepath, "r", encoding="utf-8") as file: tree = ast.parse(file.read(), filename=filepath) - for node in ast.walk(tree): - if isinstance(node, ast.Call) and isinstance( - node.func, ast.Attribute - ): - if node.func.attr == "set_data": - try: - resource: str = ast.literal_eval(node.args[0]) - if resource not in resources: - resources[resource] = [] - resources[resource].append( - ast.literal_eval(node.args[-1]) - ) - except ValueError: - # The resource name or function name is not a literal, so this `set_data` is a dynamic call - pass + directly_set = DirectlySetResources() + directly_set.visit(tree) + for resource in directly_set.resources: + if resource not in resources: + resources[resource] = ResourceSourceList() + resources[resource] += directly_set.resources[resource] + for resource in directly_set.dynamic_resources: + if resource not in dynamic_resources: + dynamic_resources[resource] = ResourceSourceList() + dynamic_resources[resource] += directly_set.dynamic_resources[ + resource + ] + # for dynamic_key, dynamic_value in dynamic_resources.items(): + # dynamic_resource = re.compile(dynamic_key) + # for resource in resources.keys(): + # if dynamic_resource.search(resource): + # resources[resource] += dynamic_value return resources From 74a715bccfd762e627d9796f639d67272e2719a3 Mon Sep 17 00:00:00 2001 From: Jon Cluce Date: Thu, 16 Jan 2025 10:39:38 -0500 Subject: [PATCH 33/58] :necktie: Match keys and values for assignment loops --- CPAC/pipeline/resource_inventory.py | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/CPAC/pipeline/resource_inventory.py b/CPAC/pipeline/resource_inventory.py index dfce0d1c2d..e3fa668d58 100755 --- a/CPAC/pipeline/resource_inventory.py +++ b/CPAC/pipeline/resource_inventory.py @@ -353,7 +353,6 @@ def visit_Assign(self, node: ast.Assign) -> None: for target in node.targets: resource = self.parse_ast(target) self.context = resource, value - # self.assign_resource(str(self.parse_ast(target)), value) self.generic_visit(node) def visit_Call(self, node: ast.Call) -> None: @@ -365,8 +364,12 @@ def visit_Call(self, node: ast.Call) -> None: elif hasattr(node.args[0], "id"): resource = self.lookup_context(getattr(node.args[0], "id")) if isinstance(resource, MultipleContext): - for resource_context in resource: - self.assign_resource(resource_context, value) + if len(resource) == len(value): + for k, v in 
zip(resource, value): + self.assign_resource(k, v) + else: + for resource_context in resource: + self.assign_resource(resource_context, value) self.generic_visit(node) return elif isinstance(node.args[0], ast.JoinedStr): From 9730dad954296c1889061395ee0743795580b41e Mon Sep 17 00:00:00 2001 From: Jon Cluce Date: Thu, 16 Jan 2025 12:45:47 -0500 Subject: [PATCH 34/58] :necktie: Include func def args in context --- CPAC/pipeline/resource_inventory.py | 36 +++++++++++++++++++++-------- 1 file changed, 27 insertions(+), 9 deletions(-) diff --git a/CPAC/pipeline/resource_inventory.py b/CPAC/pipeline/resource_inventory.py index e3fa668d58..1f66cea394 100755 --- a/CPAC/pipeline/resource_inventory.py +++ b/CPAC/pipeline/resource_inventory.py @@ -251,8 +251,14 @@ def __init__(self) -> None: def assign_resource(self, resource: str, value: str) -> None: """Assign a value to a resource.""" - target = self.dynamic_resources if r".*" in value else self.resources + if isinstance(resource, ast.AST): + resource = self.parse_ast(resource) resource = str(resource) + target = ( + self.dynamic_resources + if r".*" in value or r".*" in resource + else self.resources + ) if resource not in target: target[resource] = ResourceSourceList() target[resource] += value @@ -295,8 +301,8 @@ def lookup() -> str | list[str]: context = MultipleContext(context) return context - @staticmethod - def handle_multiple_contexts(contexts: list[str | list[str]]) -> list[str]: + # @staticmethod + def handle_multiple_contexts(self, contexts: list[str | list[str]]) -> list[str]: """Parse multiple contexts.""" if isinstance(contexts, list): return MultipleContext( @@ -339,12 +345,18 @@ def parse_ast(self, node: Any) -> Any: self.parse_ast(key): self.parse_ast(value) for key, value in dict(zip(node.keys, node.values)).items() } - for attr in ["values", "elts"]: - if hasattr(node, attr): - return [self.parse_ast(subnode) for subnode in getattr(node, attr)] - for attr in ["value", "id"]: - if hasattr(node, attr): - return self.parse_ast(getattr(node, attr)) + if not isinstance(node, ast.Call): + for attr in ["values", "elts", "args"]: + if hasattr(node, attr): + iterable = getattr(node, attr) + if isinstance(iterable, Iterable): + return [ + self.parse_ast(subnode) for subnode in getattr(node, attr) + ] + return self.parse_ast(iterable) + for attr in ["value", "id", "arg"]: + if hasattr(node, attr): + return self.parse_ast(getattr(node, attr)) return r".*" # wildcard for regex matching def visit_Assign(self, node: ast.Assign) -> None: @@ -397,6 +409,12 @@ def visit_For(self, node: ast.For) -> None: self.context = target, self.parse_ast(node.iter) self.generic_visit(node) + def visit_FunctionDef(self, node: ast.FunctionDef) -> None: + """Visit a function definition.""" + for arg in self.parse_ast(node): + self.context = arg, ".*" + self.generic_visit(node) + def find_directly_set_resources(package_name: str) -> dict[str, ResourceSourceList]: """Find all resources set explicitly via :pyy:method:`~CPAC.pipeline.engine.ResourcePool.set_data`. 
From 252098b776b17e775c2a5f7207774bad4e15c2bf Mon Sep 17 00:00:00 2001 From: Jon Cluce Date: Thu, 16 Jan 2025 12:49:10 -0500 Subject: [PATCH 35/58] :children_crossing: Exclude dummy node from inventory --- CPAC/pipeline/resource_inventory.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/CPAC/pipeline/resource_inventory.py b/CPAC/pipeline/resource_inventory.py index 1f66cea394..be7bdafa5f 100755 --- a/CPAC/pipeline/resource_inventory.py +++ b/CPAC/pipeline/resource_inventory.py @@ -94,8 +94,9 @@ class ResourceSourceList: def __add__(self, other: "str | list[str] | ResourceSourceList") -> list[str]: """Add a list of sources to the list.""" if isinstance(other, str): - if not other: - other = "(dummy node)" + if not other or other == "created_before_this_test": + # dummy node in a testing function, no need to include in inventory + return list(self) other = [other] new_set = {*self.sources, *other} return sorted(new_set, key=str.casefold) From b3521fc38509b7c5f080e92ff11496185c3514ca Mon Sep 17 00:00:00 2001 From: Jon Cluce Date: Fri, 17 Jan 2025 14:31:21 -0500 Subject: [PATCH 36/58] :necktie: Handle some special cases --- CPAC/pipeline/engine.py | 27 ++-- CPAC/pipeline/resource_inventory.py | 212 ++++++++++++++++++++++------ 2 files changed, 179 insertions(+), 60 deletions(-) diff --git a/CPAC/pipeline/engine.py b/CPAC/pipeline/engine.py index 7494ae92ee..91066d820f 100644 --- a/CPAC/pipeline/engine.py +++ b/CPAC/pipeline/engine.py @@ -1009,6 +1009,19 @@ def post_process(self, wf, label, connection, json_info, pipe_idx, pipe_x, outs) for label_con_tpl in post_labels: label = label_con_tpl[0] connection = (label_con_tpl[1], label_con_tpl[2]) + if "desc-" not in label: + if "space-template" in label: + new_label = label.replace( + "space-template", "space-template_desc-zstd" + ) + else: + new_label = f"desc-zstd_{label}" + else: + for tag in label.split("_"): + if "desc-" in tag: + newtag = f"{tag}-zstd" + new_label = label.replace(tag, newtag) + break if label in Outputs.to_zstd: zstd = z_score_standardize(f"{label}_zstd_{pipe_x}", input_type) @@ -1017,20 +1030,6 @@ def post_process(self, wf, label, connection, json_info, pipe_idx, pipe_x, outs) node, out = self.get_data(mask, pipe_idx=mask_idx) wf.connect(node, out, zstd, "inputspec.mask") - if "desc-" not in label: - if "space-template" in label: - new_label = label.replace( - "space-template", "space-template_desc-zstd" - ) - else: - new_label = f"desc-zstd_{label}" - else: - for tag in label.split("_"): - if "desc-" in tag: - newtag = f"{tag}-zstd" - new_label = label.replace(tag, newtag) - break - post_labels.append((new_label, zstd, "outputspec.out_file")) self.set_data( diff --git a/CPAC/pipeline/resource_inventory.py b/CPAC/pipeline/resource_inventory.py index be7bdafa5f..36038913f4 100755 --- a/CPAC/pipeline/resource_inventory.py +++ b/CPAC/pipeline/resource_inventory.py @@ -19,6 +19,7 @@ from argparse import ArgumentDefaultsHelpFormatter, ArgumentParser, Namespace import ast +from collections.abc import Hashable from dataclasses import dataclass, field import importlib from importlib.resources import files @@ -26,6 +27,7 @@ from itertools import chain, product import os from pathlib import Path +import re from typing import Any, cast, Iterable, Optional from unittest.mock import patch @@ -34,9 +36,30 @@ from CPAC.pipeline.engine import template_dataframe from CPAC.pipeline.nodeblock import NodeBlockFunction +from CPAC.pipeline.schema import latest_schema from CPAC.utils.monitoring import UTLOGGER from 
CPAC.utils.outputs import Outputs
 
+ONE_OFFS: dict[str, list[str]] = {
+    r".*desc-preproc_bold": ["func_ingress"],
+    r".*-sm.*": [
+        f"spatial_smoothing_{smooth_opt}"
+        for smooth_opt in latest_schema.schema["post_processing"]["spatial_smoothing"][
+            "smoothing_method"
+        ][0].container
+    ],
+    r".*-zstd.*": [f"{fisher}zscore_standardize" for fisher in ["", "fisher_"]],
+}
+"""A few out-of-nodeblock generated resources.
+
+Easier to note these manually than to code up the AST rules."""
+
+SKIPS: list[str] = [
+    "CPAC.unet.__init__",
+    "CPAC.unet._torch",
+]
+"""No nodeblock functions in these modules that dynamically install `torch`."""
+
 
 def import_nodeblock_functions(
     package_name: str, exclude: Optional[list[str]] = None
@@ -238,6 +261,47 @@ def _flatten_io(io: list[Iterable]) -> list[str]:
 class MultipleContext(list):
     """Subclass of list to store multiple contexts."""
 
+    def __init__(self, /, *args, **kwargs) -> None:
+        """Initialize MultipleContext."""
+        super().__init__(*args, **kwargs)
+        data = self._unique(self)
+        self.clear()
+        self.extend(data)
+
+    def __hash__(self) -> int:
+        """Hash a MultipleContext instance."""
+        return hash(str(self))
+
+    def __str__(self) -> str:
+        """Return a stringified MultipleContext instance."""
+        if len(self) == 1:
+            return str(self[0])
+        return super().__str__()
+
+    def append(self, item: Any) -> None:
+        """Append if not already included."""
+        if item not in self:
+            super().append(item)
+
+    def extend(self, iterable: Iterable) -> None:
+        """Extend MultipleContext."""
+        for item in iterable:
+            self.append(item)
+
+    @staticmethod
+    def _unique(iterable: Iterable) -> list:
+        """Dedupe."""
+        try:
+            seen = set()
+            return [x for x in iterable if not (x in seen or seen.add(x))]
+        except TypeError:
+            seen = set()
+            return [
+                x
+                for x in (MultipleContext(item) for item in iterable)
+                if not (x in seen or seen.add(x))
+            ]
+
 
 class DirectlySetResources(ast.NodeVisitor):
     """Class to track resources set directly, rather than through NodeBlocks."""
@@ -246,15 +310,22 @@ def __init__(self) -> None:
         """Initialize the visitor."""
         super().__init__()
         self._context: dict[str, Any] = {}
-        self.dynamic_resources: dict[str, ResourceSourceList] = {}
+        self.dynamic_resources: dict[str, ResourceSourceList] = {
+            resource: ResourceSourceList(sources)
+            for resource, sources in ONE_OFFS.items()
+        }
         self._history: dict[str, list[Any]] = {}
         self.resources: dict[str, ResourceSourceList] = {}
 
-    def assign_resource(self, resource: str, value: str) -> None:
+    def assign_resource(self, resource: str, value: str | MultipleContext) -> None:
         """Assign a value to a resource."""
         if isinstance(resource, ast.AST):
             resource = self.parse_ast(resource)
         resource = str(resource)
+        if isinstance(value, MultipleContext):
+            for subvalue in value:
+                self.assign_resource(resource, subvalue)
+            return
         target = (
             self.dynamic_resources
             if r".*" in value or r".*" in resource
@@ -279,31 +350,36 @@ def context(self, value: tuple[Iterable, Any]) -> None:
         else:
             self._context[key] = _value
         if key not in self._history:
-            self._history[key] = []
+            self._history[key] = [".*"]
         self._history[key].append(_value)
 
-    def lookup_context(self, variable: str) -> str | MultipleContext:
+    def lookup_context(
+        self, variable: str, return_type: Optional[type] = None
+    ) -> str | MultipleContext:
         """Plug in variable."""
-
-        def lookup() -> str | list[str]:
-            """Look up context."""
-            if variable in self.context:
-                if self.context[variable] == variable:
-                    history = list(self._history[variable])
-                    while history and history[-1] == 
variable:
-                        history.pop()
-                    if history:
-                        return history[-1]
-                return self.context[variable]
-            return ".*"
-
-        context = lookup()
-        if isinstance(context, list):
-            context = MultipleContext(context)
-        return context
+        if variable in self.context:
+            if self.context[variable] == variable or (
+                return_type and not isinstance(self.context[variable], return_type)
+            ):
+                history = list(self._history[variable])
+                while history and history[-1] == variable:
+                    history.pop()
+                if history:
+                    context = history[-1]
+                    while (
+                        return_type
+                        and len(history)
+                        and not isinstance(context, return_type)
+                    ):
+                        context = history.pop()
+                    if return_type and not isinstance(context, return_type):
+                        return ".*"
+                    return context
+            return self.context[variable]
+        return ".*"
 
-    # @staticmethod
-    def handle_multiple_contexts(self, contexts: list[str | list[str]]) -> list[str]:
+    @staticmethod
+    def handle_multiple_contexts(contexts: list[str | list[str]]) -> list[str]:
         """Parse multiple contexts."""
         if isinstance(contexts, list):
             return MultipleContext(
@@ -322,9 +398,7 @@ def handle_multiple_contexts(self, contexts: list[str | list[str]]
     def parse_ast(self, node: Any) -> Any:
         """Parse AST."""
         if not isinstance(node, ast.AST):
-            if isinstance(node, str):
-                return node
-            if not isinstance(node, Iterable):
+            if isinstance(node, str) or not isinstance(node, Iterable):
                 return str(node)
         if isinstance(node, ast.Dict):
             return {
@@ -343,7 +417,9 @@ def parse_ast(self, node: Any) -> Any:
             return "".join(str(item) for item in node_values)
         if isinstance(node, ast.Dict):
             return {
-                self.parse_ast(key): self.parse_ast(value)
+                self.parse_ast(key)
+                if isinstance(self.parse_ast(key), Hashable)
+                else ".*": self.parse_ast(value)
                 for key, value in dict(zip(node.keys, node.values)).items()
             }
         if not isinstance(node, ast.Call):
@@ -358,11 +434,22 @@ def parse_ast(self, node: Any) -> Any:
             for attr in ["value", "id", "arg"]:
                 if hasattr(node, attr):
                     return self.parse_ast(getattr(node, attr))
+        elif (
+            hasattr(node, "func")
+            and getattr(node.func, "attr", None) in ["items", "keys", "values"]
+            and getattr(getattr(node.func, "value", None), "id", None) in self.context
+        ):
+            context = self.lookup_context(node.func.value.id, return_type=dict)
+            if isinstance(context, dict):
+                return MultipleContext(getattr(context, node.func.attr)())
         return r".*"  # wildcard for regex matching
 
     def visit_Assign(self, node: ast.Assign) -> None:
         """Visit an assignment."""
         value = self.parse_ast(node.value)
+        if value == "row" and getattr(node.value, "attr", None):
+            # hack for template dataframe
+            value = MultipleContext(getattr(template_dataframe(), node.value.attr))
         for target in node.targets:
             resource = self.parse_ast(target)
             self.context = resource, value
@@ -372,6 +459,9 @@ def visit_Call(self, node: ast.Call) -> None:
         """Visit a function call."""
         if isinstance(node.func, ast.Attribute) and node.func.attr == "set_data":
             value = self.parse_ast(node.args[5])
+            if isinstance(node.args[5], ast.Name):
+                if isinstance(value, str):
+                    value = self.lookup_context(value)
             if hasattr(node.args[0], "value"):
                 resource: str = getattr(node.args[0], "value")
             elif hasattr(node.args[0], "id"):
@@ -395,29 +485,52 @@ def visit_Call(self, node: ast.Call) -> None:
 
     def visit_For(self, node: ast.For) -> None:
         """Visit a for loop."""
-        # This is probably too specific,
-        # will need to be updated if we add more out-of-nodeblock settings.
target = self.parse_ast(node.target)
         if (
             hasattr(node.iter, "func")
             and hasattr(node.iter.func, "value")
             and hasattr(node.iter.func.value, "id")
         ):
-            context = self.context.get(self.parse_ast(node.iter.func.value.id), ".*")
-            if isinstance(target, list) and isinstance(context, dict):
-                self.context = target[0], list(context.keys())
+            context = self.parse_ast(node.iter)
+            if not context:
+                context = r".*"
+            if isinstance(target, list):
+                target_len = len(target)
+                if isinstance(context, dict):
+                    self.context = target[0], MultipleContext(context.keys())
+                if isinstance(context, list) and all(
+                    (isinstance(item, tuple) and len(item) == target_len)
+                    for item in context
+                ):
+                    for index, item in enumerate(target):
+                        self.context = (
+                            item,
+                            MultipleContext(
+                                subcontext[index] for subcontext in context
+                            ),
+                        )
+        elif hasattr(node.iter, "value") and (
+            getattr(node.iter.value, "id", None) == "self"
+            or getattr(node.iter, "attr", False)
+        ):
+            self.context = target, ".*"
         else:
             self.context = target, self.parse_ast(node.iter)
         self.generic_visit(node)
 
     def visit_FunctionDef(self, node: ast.FunctionDef) -> None:
         """Visit a function definition."""
+        if node.name == "set_data":
+            # skip the method definition
+            return
         for arg in self.parse_ast(node):
             self.context = arg, ".*"
         self.generic_visit(node)
 
 
-def find_directly_set_resources(package_name: str) -> dict[str, ResourceSourceList]:
+def find_directly_set_resources(
+    package_name: str,
+) -> tuple[dict[str, ResourceSourceList], dict[str, ResourceSourceList]]:
     """Find all resources set explicitly via :py:meth:`~CPAC.pipeline.engine.ResourcePool.set_data`.
 
     Parameters
@@ -429,6 +542,9 @@ def find_directly_set_resources(package_name: str) -> dict[str, ResourceSourceLi
     -------
     dict
         A dictionary containing the name of the resource and the name of the functions that set it.
+
+    dict
+        A dictionary containing regex strings for special cases.
     """
     resources: dict[str, ResourceSourceList] = {}
     dynamic_resources: dict[str, ResourceSourceList] = {}
@@ -450,12 +566,7 @@ def find_directly_set_resources(package_name: str) -> dict[str, ResourceSourceLi
                     dynamic_resources[resource] += directly_set.dynamic_resources[
                         resource
                     ]
-    # for dynamic_key, dynamic_value in dynamic_resources.items():
-    #     dynamic_resource = re.compile(dynamic_key)
-    #     for resource in resources.keys():
-    #         if dynamic_resource.search(resource):
-    #             resources[resource] += dynamic_value
-    return resources
+    return resources, dynamic_resources
 
 
 def resource_inventory(package: str = "CPAC") -> dict[str, ResourceIO]:
@@ -464,11 +575,7 @@ def resource_inventory(package: str = "CPAC") -> dict[str, ResourceIO]:
     # Node block function inputs and outputs
     for nbf in import_nodeblock_functions(
         package,
-        [
-            # No nodeblock functions in these modules that dynamically isntall torch
-            "CPAC.unet.__init__",
-            "CPAC.unet._torch",
-        ],
+        exclude=SKIPS,
     ):
         nbf_name = f"{nbf.__module__}.{nbf.__qualname__}"
         if hasattr(nbf, "inputs"):
@@ -498,7 +605,8 @@ def resource_inventory(package: str = "CPAC") -> dict[str, ResourceIO]:
     else:
         resources[row.Key].output_from += output_from
     # Hard-coded resources
-    for resource, functions in find_directly_set_resources(package).items():
+    direct, dynamic = find_directly_set_resources(package)
+    for resource, functions in direct.items():
         if resource not in resources:
             resources[resource] = ResourceIO(resource, output_from=functions)
         else:
@@ -511,6 +619,18 @@ def resource_inventory(package: str = "CPAC") -> dict[str, ResourceIO]:
         )
     else:
         resources[row.Resource].output_to += row["Sub-Directory"]
+    # Special cases
+    for dynamic_key, dynamic_value in dynamic.items():
+        if dynamic_key != r".*":
+            dynamic_resource = re.compile(dynamic_key)
+            for resource in resources.keys():
+                if dynamic_resource.search(resource):
+                    resources[resource].output_from += dynamic_value
+    if "interface" in resources:
+        # this is a loop in setting up nodeblocks
+        # https://github.com/FCP-INDI/C-PAC/blob/61ad414447023daf0e401a81c92267b09c64ed94/CPAC/pipeline/engine.py#L1453-L1464
+        # it's already handled in the NodeBlock resources
+        del resources["interface"]
     return dict(sorted(resources.items(), key=lambda item: item[0].casefold()))

From 352516dd14a320afad5cd1262479b70436a4c8a3 Mon Sep 17 00:00:00 2001
From: Jon Cluce
Date: Thu, 9 Jan 2025 11:38:11 -0500
Subject: [PATCH 37/58] :children_crossing: Include resource source information
 in resource-not-found errors

---
 CHANGELOG.md                        |  1 +
 CPAC/pipeline/engine.py             | 14 ++++++++++----
 CPAC/pipeline/resource_inventory.py | 22 ++++++++++++++++++--
 3 files changed, 31 insertions(+), 6 deletions(-)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 9de89dc58e..b67477ffde 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -31,6 +31,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
 - Moved `pygraphviz` from requirements to `graphviz` optional dependencies group.
 - Automatically tag untagged `subject_id` and `unique_id` as `!!str` when loading data config files.
 - Made orientation configurable (was hard-coded as "RPI").
+- Resource-not-found errors now include information about where to source those resources.
### Fixed diff --git a/CPAC/pipeline/engine.py b/CPAC/pipeline/engine.py index 91066d820f..878b743bfe 100644 --- a/CPAC/pipeline/engine.py +++ b/CPAC/pipeline/engine.py @@ -420,10 +420,12 @@ def get( if report_fetched: return (None, None) return None + from CPAC.pipeline.resource_inventory import where_to_find + msg = ( "\n\n[!] C-PAC says: None of the listed resources are in " - f"the resource pool:\n\n {resource}\n\nOptions:\n- You " - "can enable a node block earlier in the pipeline which " + f"the resource pool:\n\n {where_to_find(resource)}\n\nOptions:\n" + "- You can enable a node block earlier in the pipeline which " "produces these resources. Check the 'outputs:' field in " "a node block's documentation.\n- You can directly " "provide this required data by pulling it from another " @@ -458,7 +460,9 @@ def copy_resource(self, resource, new_name): try: self.rpool[new_name] = self.rpool[resource] except KeyError: - msg = f"[!] {resource} not in the resource pool." + from CPAC.pipeline.resource_inventory import where_to_find + + msg = f"[!] Not in the resource pool:\n{where_to_find(resource)}" raise Exception(msg) def update_resource(self, resource, new_name): @@ -630,11 +634,13 @@ def get_strats(self, resources, debug=False): total_pool.append(sub_pool) if not total_pool: + from CPAC.pipeline.resource_inventory import where_to_find + raise LookupError( "\n\n[!] C-PAC says: None of the listed " "resources in the node block being connected " "exist in the resource pool.\n\nResources:\n" - "%s\n\n" % resource_list + "%s\n\n" % where_to_find(resource_list) ) # TODO: right now total_pool is: diff --git a/CPAC/pipeline/resource_inventory.py b/CPAC/pipeline/resource_inventory.py index 36038913f4..45396dfe06 100755 --- a/CPAC/pipeline/resource_inventory.py +++ b/CPAC/pipeline/resource_inventory.py @@ -242,7 +242,7 @@ def cli_parser() -> Namespace: return parser.parse_args() -def _flatten_io(io: list[Iterable]) -> list[str]: +def _flatten_io(io: Iterable[Iterable]) -> list[str]: """Given a list of strings or iterables thereof, flatten the list to all strings.""" if all(isinstance(resource, str) for resource in io): return cast(list[str], io) @@ -577,7 +577,7 @@ def resource_inventory(package: str = "CPAC") -> dict[str, ResourceIO]: package, exclude=SKIPS, ): - nbf_name = f"{nbf.__module__}.{nbf.__qualname__}" + nbf_name = f"{nbf.name} ({nbf.__module__}.{nbf.__qualname__})" if hasattr(nbf, "inputs"): for nbf_input in _flatten_io(cast(list[Iterable], nbf.inputs)): if nbf_input: @@ -641,6 +641,24 @@ def dump_inventory_to_yaml(inventory: dict[str, ResourceIO]) -> str: ) +def where_to_find(resources: list[str] | str) -> str: + """Return a multiline string describing where each listed resource is output from.""" + if isinstance(resources, str): + resources = [resources] + resources = _flatten_io(resources) + inventory = resource_inventory("CPAC") + output = "" + for resource in resources: + output += f"'{resource}' is output from:\n" + if resource in inventory: + for source in inventory[resource].output_from: + output += f" {source}\n" + else: + output += " !! 
Nowhere !!\n"
+        output += "\n"
+    return output.rstrip()
+
+
 def main() -> None:
     """Save the NodeBlock inventory to a file."""
     args = cli_parser()

From 77caa2a7cef0c85defe7e01644569f4728e1a469 Mon Sep 17 00:00:00 2001
From: Jon Cluce
Date: Fri, 10 Jan 2025 15:53:04 -0500
Subject: [PATCH 38/58] :recycle: Move bids_examples into a reusable pytest
 fixture

---
 CPAC/conftest.py                         | 34 ++++++++++++++++++++++++
 dev/circleci_data/conftest.py            | 19 +++++++++++++
 dev/circleci_data/test_external_utils.py | 13 ++-------
 3 files changed, 55 insertions(+), 11 deletions(-)
 create mode 100644 CPAC/conftest.py
 create mode 100644 dev/circleci_data/conftest.py

diff --git a/CPAC/conftest.py b/CPAC/conftest.py
new file mode 100644
index 0000000000..ea2be416a5
--- /dev/null
+++ b/CPAC/conftest.py
@@ -0,0 +1,34 @@
+# Copyright (C) 2025 C-PAC Developers
+
+# This file is part of C-PAC.
+
+# C-PAC is free software: you can redistribute it and/or modify it under
+# the terms of the GNU Lesser General Public License as published by the
+# Free Software Foundation, either version 3 of the License, or (at your
+# option) any later version.
+
+# C-PAC is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
+# License for more details.
+
+# You should have received a copy of the GNU Lesser General Public
+# License along with C-PAC. If not, see .
+"""Global fixtures for C-PAC tests."""
+
+from pathlib import Path
+
+from _pytest.tmpdir import TempPathFactory
+from git import Repo
+import pytest
+
+
+@pytest.fixture(scope="session")
+def bids_examples(tmp_path_factory: TempPathFactory) -> Path:
+    """Get the BIDS examples dataset."""
+    example_dir = tmp_path_factory.mktemp("bids-examples")
+    if not example_dir.exists():
+        Repo.clone_from(
+            "https://github.com/bids-standard/bids-examples.git", str(example_dir)
+        )
+    return example_dir
diff --git a/dev/circleci_data/conftest.py b/dev/circleci_data/conftest.py
new file mode 100644
index 0000000000..ba239b2b4f
--- /dev/null
+++ b/dev/circleci_data/conftest.py
@@ -0,0 +1,19 @@
+# Copyright (C) 2025 C-PAC Developers
+
+# This file is part of C-PAC.
+
+# C-PAC is free software: you can redistribute it and/or modify it under
+# the terms of the GNU Lesser General Public License as published by the
+# Free Software Foundation, either version 3 of the License, or (at your
+# option) any later version.
+
+# C-PAC is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
+# License for more details.
+
+# You should have received a copy of the GNU Lesser General Public
+# License along with C-PAC. If not, see .
+"""Global fixtures for C-PAC tests.""" + +from CPAC.conftest import * # noqa: F403 diff --git a/dev/circleci_data/test_external_utils.py b/dev/circleci_data/test_external_utils.py index f516b0c903..31f6b243da 100644 --- a/dev/circleci_data/test_external_utils.py +++ b/dev/circleci_data/test_external_utils.py @@ -31,8 +31,6 @@ from CPAC.__main__ import utils as CPAC_main_utils # noqa: E402 -# pylint: disable=wrong-import-position - def _click_backport(command, key): """Switch back to underscores for older versions of click.""" @@ -93,18 +91,11 @@ def test_build_data_config(caplog, cli_runner, multiword_connector): _delete_test_yaml(test_yaml) -def test_new_settings_template(caplog, cli_runner): +def test_new_settings_template(bids_examples: Path, caplog, cli_runner): """Test CLI ``utils new-settings-template``.""" caplog.set_level(INFO) os.chdir(CPAC_DIR) - - example_dir = os.path.join(CPAC_DIR, "bids-examples") - if not os.path.exists(example_dir): - from git import Repo - - Repo.clone_from( - "https://github.com/bids-standard/bids-examples.git", example_dir - ) + assert bids_examples.exists() result = cli_runner.invoke( CPAC_main_utils.commands[ From 5d1112551fd0f7103bf22e32ab6e4ea6ea0c34c7 Mon Sep 17 00:00:00 2001 From: Jon Cluce Date: Fri, 10 Jan 2025 17:12:29 -0500 Subject: [PATCH 39/58] :white_check_mark: Add test for resource inventory error message --- CPAC/_entrypoints/run.py | 2 +- CPAC/conftest.py | 2 +- CPAC/pipeline/resource_inventory.py | 2 +- CPAC/pipeline/test/test_engine.py | 140 +++++++++++++++++++++++++++- 4 files changed, 140 insertions(+), 6 deletions(-) diff --git a/CPAC/_entrypoints/run.py b/CPAC/_entrypoints/run.py index 98a30ba094..f84b6cf799 100755 --- a/CPAC/_entrypoints/run.py +++ b/CPAC/_entrypoints/run.py @@ -795,7 +795,7 @@ def run_main(): args.data_config_file, args.participant_label, args.aws_input_creds ) sub_list = sub_list_filter_by_labels( - sub_list, {"T1w": args.T1w_label, "bold": args.bold_label} + list(sub_list), {"T1w": args.T1w_label, "bold": args.bold_label} ) # C-PAC only handles single anatomical images (for now) diff --git a/CPAC/conftest.py b/CPAC/conftest.py index ea2be416a5..7b765736ee 100644 --- a/CPAC/conftest.py +++ b/CPAC/conftest.py @@ -27,7 +27,7 @@ def bids_examples(tmp_path_factory: TempPathFactory) -> Path: """Get the BIDS examples dataset.""" example_dir = tmp_path_factory.mktemp("bids-examples") - if not example_dir.exists(): + if not example_dir.exists() or not any(example_dir.iterdir()): Repo.clone_from( "https://github.com/bids-standard/bids-examples.git", str(example_dir) ) diff --git a/CPAC/pipeline/resource_inventory.py b/CPAC/pipeline/resource_inventory.py index 45396dfe06..01c28ae74e 100755 --- a/CPAC/pipeline/resource_inventory.py +++ b/CPAC/pipeline/resource_inventory.py @@ -649,7 +649,7 @@ def where_to_find(resources: list[str] | str) -> str: inventory = resource_inventory("CPAC") output = "" for resource in resources: - output += f"'{resource}' is output from:\n" + output += f"'{resource}' can be output from:\n" if resource in inventory: for source in inventory[resource].output_from: output += f" {source}\n" diff --git a/CPAC/pipeline/test/test_engine.py b/CPAC/pipeline/test/test_engine.py index cf85f50dbe..25b16d9e44 100644 --- a/CPAC/pipeline/test/test_engine.py +++ b/CPAC/pipeline/test/test_engine.py @@ -1,5 +1,27 @@ +# Copyright (C) 2021-2025 C-PAC Developers + +# This file is part of C-PAC. 
+ +# C-PAC is free software: you can redistribute it and/or modify it under +# the terms of the GNU Lesser General Public License as published by the +# Free Software Foundation, either version 3 of the License, or (at your +# option) any later version. + +# C-PAC is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public +# License for more details. + +# You should have received a copy of the GNU Lesser General Public +# License along with C-PAC. If not, see . +"""Unit tests for the C-PAC pipeline engine.""" + +from argparse import Namespace import os +from pathlib import Path +from typing import cast +from _pytest.logging import LogCaptureFixture import pytest from CPAC.pipeline.cpac_pipeline import ( @@ -138,17 +160,129 @@ def test_build_workflow(pipe_config, bids_dir, test_dir): wf.run() +def test_missing_resource( + bids_examples: Path, caplog: LogCaptureFixture, tmp_path: Path +) -> None: + """Test the error message thrown when a resource is missing.""" + from datetime import datetime + + import yaml + + from CPAC.pipeline.cpac_runner import run + from CPAC.utils.bids_utils import sub_list_filter_by_labels + from CPAC.utils.configuration import Preconfiguration, set_subject + from CPAC.utils.configuration.yaml_template import create_yaml_from_template + + st = datetime.now().strftime("%Y-%m-%dT%H-%M-%SZ") + namespace = Namespace( + bids_dir=str(bids_examples / "ds113b"), + output_dir=str(tmp_path / "output"), + analysis_level="test_config", + participant_label="sub-01", + ) + c = Preconfiguration("anat-only") + c["pipeline_setup", "output_directory", "path"] = namespace.output_dir + c["pipeline_setup", "log_directory", "path"] = str(tmp_path / "logs") + c["pipeline_setup", "working_directory", "path"] = str(tmp_path / "work") + c["pipeline_setup", "system_config", "maximum_memory_per_participant"] = 1.0 + c["pipeline_setup", "system_config", "max_cores_per_participant"] = 1 + c["pipeline_setup", "system_config", "num_participants_at_once"] = 1 + c["pipeline_setup", "system_config", "num_ants_threads"] = 1 + c["pipeline_setup", "working_directory", "remove_working_dir"] = True + sub_list = create_cpac_data_config( + namespace.bids_dir, + namespace.participant_label, + None, + True, + only_one_anat=False, + ) + sub_list = sub_list_filter_by_labels(list(sub_list), {"T1w": None, "bold": None}) + for i, sub in enumerate(sub_list): + if isinstance(sub.get("anat"), dict): + for anat_key in sub["anat"]: + if isinstance(sub["anat"][anat_key], list) and len( + sub["anat"][anat_key] + ): + sub_list[i]["anat"][anat_key] = sub["anat"][anat_key][0] + if isinstance(sub.get("anat"), list) and len(sub["anat"]): + sub_list[i]["anat"] = sub["anat"][0] + data_config_file = f"cpac_data_config_{st}.yml" + sublogdirs = [set_subject(sub, c)[2] for sub in sub_list] + # write out the data configuration file + data_config_file = os.path.join(sublogdirs[0], data_config_file) + with open(data_config_file, "w", encoding="utf-8") as _f: + noalias_dumper = yaml.dumper.SafeDumper + noalias_dumper.ignore_aliases = lambda self, data: True + yaml.dump(sub_list, _f, default_flow_style=False, Dumper=noalias_dumper) + + # update and write out pipeline config file + pipeline_config_file = os.path.join(sublogdirs[0], f"cpac_pipeline_config_{st}.yml") + with open(pipeline_config_file, "w", encoding="utf-8") as _f: + _f.write(create_yaml_from_template(c)) + minimized_config = 
f"{pipeline_config_file[:-4]}_min.yml" + with open(minimized_config, "w", encoding="utf-8") as _f: + _f.write(create_yaml_from_template(c, import_from="blank")) + for config_file in (data_config_file, pipeline_config_file, minimized_config): + os.chmod(config_file, 0o444) # Make config files readonly + + if len(sublogdirs) > 1: + # If more than one run is included in the given data config + # file, an identical copy of the data and pipeline config + # will be included in the log directory for each run + for sublogdir in sublogdirs[1:]: + for config_file in ( + data_config_file, + pipeline_config_file, + minimized_config, + ): + try: + os.link(config_file, config_file.replace(sublogdirs[0], sublogdir)) + except FileExistsError: + pass + + run( + data_config_file, + pipeline_config_file, + plugin="Linear", + plugin_args={ + "n_procs": int( + cast( + int | str, + c["pipeline_setup", "system_config", "max_cores_per_participant"], + ) + ), + "memory_gb": int( + cast( + int | str, + c[ + "pipeline_setup", + "system_config", + "maximum_memory_per_participant", + ], + ) + ), + "raise_insufficient": c[ + "pipeline_setup", "system_config", "raise_insufficient" + ], + }, + tracking=False, + test_config=namespace.analysis_level == "test_config", + ) + + assert "can be output from" in caplog.text + + # bids_dir = "/Users/steven.giavasis/data/HBN-SI_dataset/rawdata" # test_dir = "/test_dir" # cfg = "/Users/hecheng.jin/GitHub/DevBranch/CPAC/resources/configs/pipeline_config_monkey-ABCD.yml" -cfg = "/Users/hecheng.jin/GitHub/pipeline_config_monkey-ABCDlocal.yml" -bids_dir = "/Users/hecheng.jin/Monkey/monkey_data_oxford/site-ucdavis" -test_dir = "/Users/hecheng.jin/GitHub/Test/T2preproc" # test_ingress_func_raw_data(cfg, bids_dir, test_dir) # test_ingress_anat_raw_data(cfg, bids_dir, test_dir) # test_ingress_pipeconfig_data(cfg, bids_dir, test_dir) # test_build_anat_preproc_stack(cfg, bids_dir, test_dir) if __name__ == "__main__": + cfg = "/Users/hecheng.jin/GitHub/pipeline_config_monkey-ABCDlocal.yml" + bids_dir = "/Users/hecheng.jin/Monkey/monkey_data_oxford/site-ucdavis" + test_dir = "/Users/hecheng.jin/GitHub/Test/T2preproc" test_build_workflow(cfg, bids_dir, test_dir) From 8b369f3105390773277b202fc54ad2831a4ecbff Mon Sep 17 00:00:00 2001 From: Jon Cluce Date: Mon, 13 Jan 2025 14:19:12 -0500 Subject: [PATCH 40/58] :construction_worker: Install `openssh-client` for bids-examples fixture --- .circleci/main.yml | 8 +------- 1 file changed, 1 insertion(+), 7 deletions(-) diff --git a/.circleci/main.yml b/.circleci/main.yml index a13300a78d..f936f9230d 100644 --- a/.circleci/main.yml +++ b/.circleci/main.yml @@ -48,7 +48,7 @@ commands: sudo apt-key adv --recv-keys --keyserver keyserver.ubuntu.com 78BD65473CB3BD13 curl -L https://packagecloud.io/circleci/trusty/gpgkey | sudo apt-key add - sudo apt-get update - sudo apt-get install git -y + sudo apt-get install git openssh-client -y git config --global user.email "CMI_CPAC_Support@childmind.org" git config --global user.name "Theodore (machine user) @ CircleCI" create-docker-test-container: @@ -64,11 +64,6 @@ commands: mkdir -p ~/project/test-results docker pull ${DOCKER_TAG} docker run -v /etc/passwd:/etc/passwd --user=$(id -u):c-pac -dit -P -e COVERAGE_FILE=<< parameters.coverage-file >> -v /home/circleci/project/test-results:/code/test-results -v /home/circleci:/home/circleci -v /home/circleci/project/CPAC/resources/configs/test_configs:/test_configs -v $PWD:/code -v $PWD/dev/circleci_data:$PWD/dev/circleci_data --workdir=/home/circleci/project 
--entrypoint=/bin/bash --name docker_test ${DOCKER_TAG}
-  get-sample-bids-data:
-    steps:
-      - run:
-          name: Getting Sample BIDS Data
-          command: git clone https://github.com/bids-standard/bids-examples.git
   get-singularity:
     parameters:
       version:
@@ -231,7 +226,6 @@ jobs:
       - set-up-variant:
           variant: "<< parameters.variant >>"
       - set-python-version
-      - get-sample-bids-data
      - run-pytest-docker
      - store_test_results:
          path: test-results

From c6b3a18f66138159ca5aab9f6cd9716a3d9ec067 Mon Sep 17 00:00:00 2001
From: Jon Cluce
Date: Mon, 20 Jan 2025 10:14:54 -0500
Subject: [PATCH 41/58] :children_crossing: Increase indent before missing
 resource sources

---
 CPAC/pipeline/resource_inventory.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/CPAC/pipeline/resource_inventory.py b/CPAC/pipeline/resource_inventory.py
index 01c28ae74e..a181ea6567 100755
--- a/CPAC/pipeline/resource_inventory.py
+++ b/CPAC/pipeline/resource_inventory.py
@@ -652,9 +652,9 @@ def where_to_find(resources: list[str] | str) -> str:
         output += f"'{resource}' can be output from:\n"
         if resource in inventory:
             for source in inventory[resource].output_from:
-                output += f"    {source}\n"
+                output += f"        {source}\n"
         else:
-            output += "    !! Nowhere !!\n"
+            output += "        !! Nowhere !!\n"
         output += "\n"
     return output.rstrip()

From 11948a64c94c05ef02338d3d7c4452ca3941eed2 Mon Sep 17 00:00:00 2001
From: "birajstha:construction_worker::penguin"
Date: Tue, 11 Feb 2025 18:39:58 -0500
Subject: [PATCH 42/58] Checking if overwrite transform method is the same as
 the anatomical registration method

---
 CPAC/error_handler/__init__.py               |  0
 CPAC/error_handler/exceptions.py             | 12 +++++++
 CPAC/pipeline/schema.py                      |  9 ++++++
 CPAC/pipeline/test/test_schema_validation.py | 34 ++++++++++++++++++++
 CPAC/registration/registration.py            |  5 +++
 5 files changed, 60 insertions(+)
 create mode 100644 CPAC/error_handler/__init__.py
 create mode 100644 CPAC/error_handler/exceptions.py

diff --git a/CPAC/error_handler/__init__.py b/CPAC/error_handler/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/CPAC/error_handler/exceptions.py b/CPAC/error_handler/exceptions.py
new file mode 100644
index 0000000000..1f51fcd575
--- /dev/null
+++ b/CPAC/error_handler/exceptions.py
@@ -0,0 +1,12 @@
+
+class SchemaError(Exception):
+    """Exception raised for errors in the schema."""
+    def __init__(self, message):
+        self.message = message
+        super().__init__(self.message)
+
+class NodeBlockError(Exception):
+    """Exception raised for errors in the node block."""
+    def __init__(self, message):
+        self.message = message
+        super().__init__(self.message)
\ No newline at end of file
diff --git a/CPAC/pipeline/schema.py b/CPAC/pipeline/schema.py
index fa36a0dd2e..e08261a16d 100644
--- a/CPAC/pipeline/schema.py
+++ b/CPAC/pipeline/schema.py
@@ -50,6 +50,7 @@
 from CPAC.utils.datatypes import ItemFromList, ListFromItem
 from CPAC.utils.docs import DOCS_URL_PREFIX
 from CPAC.utils.utils import YAML_BOOLS
+from CPAC.error_handler.exceptions import SchemaError
 
 # 1 or more digits, optional decimal, 'e', optional '-', 1 or more digits
 SCIENTIFIC_NOTATION_STR_REGEX = r"^([0-9]+(\.[0-9]*)*(e)-{0,1}[0-9]+)*$"
@@ -1388,6 +1389,14 @@ def schema(config_dict):
                     " Try turning one option off.\n    "
                 )
                 raise ExclusiveInvalid(msg)
+
+        overwrite = partially_validated["registration_workflows"]["anatomical_registration"]["overwrite_transform"]
+
+        if overwrite["run"] and overwrite["using"] in partially_validated["registration_workflows"]["anatomical_registration"]["registration"]["using"]:
+            raise 
ExclusiveInvalid(
+                "[!] Overwrite transform is found same as the anatomical registration method! "
+                "No need to overwrite transform with the same registration method."
+            )
     except KeyError:
         pass
     try:
diff --git a/CPAC/pipeline/test/test_schema_validation.py b/CPAC/pipeline/test/test_schema_validation.py
index 36a75a1a00..8f03bafc82 100644
--- a/CPAC/pipeline/test/test_schema_validation.py
+++ b/CPAC/pipeline/test/test_schema_validation.py
@@ -113,3 +113,37 @@ def test_pipeline_name():
     """Test that pipeline_name successfully sanitizes."""
     c = Configuration({"pipeline_setup": {"pipeline_name": ":va:lid name"}})
     assert c["pipeline_setup", "pipeline_name"] == "valid_name"
+
+
+@pytest.mark.parametrize(
+    "registration_using",
+    [
+        list(combo)
+        for _ in [
+            list(combinations(["ANTS", "FSL", "FSL-linear"], i)) for i in range(1, 4)
+        ]
+        for combo in _
+    ],
+)
+def test_overwrite_transform(registration_using):
+    """Test the case where the overwrite transform method is already a registration method."""
+    # pylint: disable=invalid-name
+    d = {
+        "registration_workflows": {
+            "anatomical_registration": {
+                "registration": {
+                    "using": registration_using
+                },
+                "overwrite_transform": {
+                    "run": "On",
+                    "using": "FSL"
+                }
+            }
+        }
+    }
+    if "FSL" not in registration_using:
+        Configuration(d)  # validates without exception
+    else:
+        with pytest.raises(ExclusiveInvalid) as e:
+            Configuration(d)
+        assert "Overwrite transform is found same as the registration method" in str(e.value)
\ No newline at end of file
diff --git a/CPAC/registration/registration.py b/CPAC/registration/registration.py
index 4848637d4d..3adcad676a 100644
--- a/CPAC/registration/registration.py
+++ b/CPAC/registration/registration.py
@@ -42,6 +42,7 @@
 from CPAC.utils.interfaces import Function
 from CPAC.utils.interfaces.fsl import Merge as fslMerge
 from CPAC.utils.utils import check_prov_for_motion_tool, check_prov_for_regtool
+from CPAC.error_handler.exceptions import NodeBlockError
 
 
 def apply_transform(
@@ -3079,6 +3080,10 @@ def overwrite_transform_anat_to_template(wf, cfg, strat_pool, pipe_num, opt=None
             "from-template_to-T1w_mode-image_xfm": (merge_inv_xfms, "merged_file"),
         }
 
+    else:
+        outputs = {}
+        raise NodeBlockError("Invalid registration tool or option provided. 
Please make sure the registration tool is ANTs and the option is FSL.") + return (wf, outputs) From 7b3603af7862211f598701c837654e13f3e1a673 Mon Sep 17 00:00:00 2001 From: "birajstha:construction_worker::penguin" Date: Tue, 11 Feb 2025 18:45:37 -0500 Subject: [PATCH 43/58] with precommit changes --- CPAC/error_handler/exceptions.py | 6 ++++-- CPAC/pipeline/schema.py | 13 ++++++++++--- CPAC/pipeline/test/test_schema_validation.py | 13 +++++-------- CPAC/registration/registration.py | 6 ++++-- 4 files changed, 23 insertions(+), 15 deletions(-) diff --git a/CPAC/error_handler/exceptions.py b/CPAC/error_handler/exceptions.py index 1f51fcd575..5c17a4028f 100644 --- a/CPAC/error_handler/exceptions.py +++ b/CPAC/error_handler/exceptions.py @@ -1,12 +1,14 @@ - class SchemaError(Exception): """Exception raised for errors in the schema.""" + def __init__(self, message): self.message = message super().__init__(self.message) + class NodeBlockError(Exception): """Exception raised for errors in the node block.""" + def __init__(self, message): self.message = message - super().__init__(self.message) \ No newline at end of file + super().__init__(self.message) diff --git a/CPAC/pipeline/schema.py b/CPAC/pipeline/schema.py index e08261a16d..825b682fb9 100644 --- a/CPAC/pipeline/schema.py +++ b/CPAC/pipeline/schema.py @@ -50,7 +50,6 @@ from CPAC.utils.datatypes import ItemFromList, ListFromItem from CPAC.utils.docs import DOCS_URL_PREFIX from CPAC.utils.utils import YAML_BOOLS -from CPAC.error_handler.exceptions import SchemaError # 1 or more digits, optional decimal, 'e', optional '-', 1 or more digits SCIENTIFIC_NOTATION_STR_REGEX = r"^([0-9]+(\.[0-9]*)*(e)-{0,1}[0-9]+)*$" @@ -1390,9 +1389,17 @@ def schema(config_dict): ) raise ExclusiveInvalid(msg) - overwrite = partially_validated["registration_workflows"]["anatomical_registration"]["overwrite_transform"] + overwrite = partially_validated["registration_workflows"][ + "anatomical_registration" + ]["overwrite_transform"] - if overwrite["run"] and overwrite["using"] in partially_validated["registration_workflows"]["anatomical_registration"]["registration"]["using"]: + if ( + overwrite["run"] + and overwrite["using"] + in partially_validated["registration_workflows"]["anatomical_registration"][ + "registration" + ]["using"] + ): raise ExclusiveInvalid( "[!] Overwrite transform is found same as the anatomical registration method! " "No need to overwrite transform with the same registration method." 
diff --git a/CPAC/pipeline/test/test_schema_validation.py b/CPAC/pipeline/test/test_schema_validation.py index 8f03bafc82..2b680ddd5a 100644 --- a/CPAC/pipeline/test/test_schema_validation.py +++ b/CPAC/pipeline/test/test_schema_validation.py @@ -131,13 +131,8 @@ def test_overwrite_transform(registration_using): d = { "registration_workflows": { "anatomical_registration": { - "registration": { - "using": registration_using - }, - "overwrite_transform": { - "run": "On", - "using": "FSL" - } + "registration": {"using": registration_using}, + "overwrite_transform": {"run": "On", "using": "FSL"}, } } } @@ -146,4 +141,6 @@ def test_overwrite_transform(registration_using): else: with pytest.raises(ExclusiveInvalid) as e: Configuration(d) - assert "Overwrite transform is found same as the registration method" in str(e.value) \ No newline at end of file + assert "Overwrite transform is found same as the registration method" in str( + e.value + ) diff --git a/CPAC/registration/registration.py b/CPAC/registration/registration.py index 3adcad676a..f7f429aeda 100644 --- a/CPAC/registration/registration.py +++ b/CPAC/registration/registration.py @@ -24,6 +24,7 @@ from nipype.interfaces.afni import utils as afni_utils from CPAC.anat_preproc.lesion_preproc import create_lesion_preproc +from CPAC.error_handler.exceptions import NodeBlockError from CPAC.func_preproc.utils import chunk_ts, split_ts_chunks from CPAC.pipeline import nipype_pipeline_engine as pe from CPAC.pipeline.nodeblock import nodeblock @@ -42,7 +43,6 @@ from CPAC.utils.interfaces import Function from CPAC.utils.interfaces.fsl import Merge as fslMerge from CPAC.utils.utils import check_prov_for_motion_tool, check_prov_for_regtool -from CPAC.error_handler.exceptions import NodeBlockError def apply_transform( @@ -3082,7 +3082,9 @@ def overwrite_transform_anat_to_template(wf, cfg, strat_pool, pipe_num, opt=None else: outputs = {} - raise NodeBlockError("Invalid registration tool or option provided. Please make sure the registration tool is ANTs and the option is FSL.") + raise NodeBlockError( + "Invalid registration tool or option provided. Please make sure the registration tool is ANTs and the option is FSL." 
+ ) return (wf, outputs) From a8adeb48480c8b3176e581852100066dda8ad336 Mon Sep 17 00:00:00 2001 From: birajstha <111654544+birajstha@users.noreply.github.com> Date: Thu, 6 Mar 2025 17:46:48 -0500 Subject: [PATCH 44/58] Update CPAC/pipeline/schema.py Co-authored-by: Jon Cluce --- CPAC/pipeline/schema.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CPAC/pipeline/schema.py b/CPAC/pipeline/schema.py index 825b682fb9..0966d9c44f 100644 --- a/CPAC/pipeline/schema.py +++ b/CPAC/pipeline/schema.py @@ -1395,7 +1395,7 @@ def schema(config_dict): if ( overwrite["run"] - and overwrite["using"] + and "ANTS" not in partially_validated["registration_workflows"]["anatomical_registration"][ "registration" ]["using"] From 8cf804f06ed11b885be4cfd4e85d4fc471b6d1e4 Mon Sep 17 00:00:00 2001 From: birajstha <111654544+birajstha@users.noreply.github.com> Date: Thu, 6 Mar 2025 17:46:56 -0500 Subject: [PATCH 45/58] Update CPAC/pipeline/test/test_schema_validation.py Co-authored-by: Jon Cluce --- CPAC/pipeline/test/test_schema_validation.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CPAC/pipeline/test/test_schema_validation.py b/CPAC/pipeline/test/test_schema_validation.py index 2b680ddd5a..02362ed0f2 100644 --- a/CPAC/pipeline/test/test_schema_validation.py +++ b/CPAC/pipeline/test/test_schema_validation.py @@ -136,7 +136,7 @@ def test_overwrite_transform(registration_using): } } } - if "FSL" not in registration_using: + if "ANTS" in registration_using: Configuration(d) # validates without exception else: with pytest.raises(ExclusiveInvalid) as e: From 8c4a7affc56b28cb6800bcad15a4a1f1487f6a49 Mon Sep 17 00:00:00 2001 From: birajstha <111654544+birajstha@users.noreply.github.com> Date: Thu, 6 Mar 2025 17:47:02 -0500 Subject: [PATCH 46/58] Update CPAC/registration/registration.py Co-authored-by: Jon Cluce --- CPAC/registration/registration.py | 1 - 1 file changed, 1 deletion(-) diff --git a/CPAC/registration/registration.py b/CPAC/registration/registration.py index f7f429aeda..abd58e9bc0 100644 --- a/CPAC/registration/registration.py +++ b/CPAC/registration/registration.py @@ -24,7 +24,6 @@ from nipype.interfaces.afni import utils as afni_utils from CPAC.anat_preproc.lesion_preproc import create_lesion_preproc -from CPAC.error_handler.exceptions import NodeBlockError from CPAC.func_preproc.utils import chunk_ts, split_ts_chunks from CPAC.pipeline import nipype_pipeline_engine as pe from CPAC.pipeline.nodeblock import nodeblock From 76bd2ae9ba12ffcbcf8a86cd3befb5566f29e7f0 Mon Sep 17 00:00:00 2001 From: birajstha <111654544+birajstha@users.noreply.github.com> Date: Thu, 6 Mar 2025 17:47:08 -0500 Subject: [PATCH 47/58] Update CPAC/registration/registration.py Co-authored-by: Jon Cluce --- CPAC/registration/registration.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/CPAC/registration/registration.py b/CPAC/registration/registration.py index abd58e9bc0..754071f199 100644 --- a/CPAC/registration/registration.py +++ b/CPAC/registration/registration.py @@ -3081,9 +3081,6 @@ def overwrite_transform_anat_to_template(wf, cfg, strat_pool, pipe_num, opt=None else: outputs = {} - raise NodeBlockError( - "Invalid registration tool or option provided. Please make sure the registration tool is ANTs and the option is FSL." 
- ) return (wf, outputs) From ba2cff458f8d5ccc3ee304a71b9dacb96ee8bd7d Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Thu, 6 Mar 2025 22:47:15 +0000 Subject: [PATCH 48/58] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- CPAC/pipeline/schema.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/CPAC/pipeline/schema.py b/CPAC/pipeline/schema.py index 0966d9c44f..693ffbaf77 100644 --- a/CPAC/pipeline/schema.py +++ b/CPAC/pipeline/schema.py @@ -1395,10 +1395,10 @@ def schema(config_dict): if ( overwrite["run"] - and "ANTS" not - in partially_validated["registration_workflows"]["anatomical_registration"][ - "registration" - ]["using"] + and "ANTS" + not in partially_validated["registration_workflows"][ + "anatomical_registration" + ]["registration"]["using"] ): raise ExclusiveInvalid( "[!] Overwrite transform is found same as the anatomical registration method! " From 82e5b93a5d9b3cd8f13b59bf295de4ceb6de002d Mon Sep 17 00:00:00 2001 From: "birajstha:construction_worker::penguin" Date: Thu, 6 Mar 2025 17:50:34 -0500 Subject: [PATCH 49/58] revert back adding error handler dir --- CPAC/error_handler/__init__.py | 0 CPAC/error_handler/exceptions.py | 14 -------------- 2 files changed, 14 deletions(-) delete mode 100644 CPAC/error_handler/__init__.py delete mode 100644 CPAC/error_handler/exceptions.py diff --git a/CPAC/error_handler/__init__.py b/CPAC/error_handler/__init__.py deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/CPAC/error_handler/exceptions.py b/CPAC/error_handler/exceptions.py deleted file mode 100644 index 5c17a4028f..0000000000 --- a/CPAC/error_handler/exceptions.py +++ /dev/null @@ -1,14 +0,0 @@ -class SchemaError(Exception): - """Exception raised for errors in the schema.""" - - def __init__(self, message): - self.message = message - super().__init__(self.message) - - -class NodeBlockError(Exception): - """Exception raised for errors in the node block.""" - - def __init__(self, message): - self.message = message - super().__init__(self.message) From 22f3a86758f643f99f1f366a4ac62b4139b07b27 Mon Sep 17 00:00:00 2001 From: "birajstha:construction_worker::penguin" Date: Thu, 6 Mar 2025 18:20:43 -0500 Subject: [PATCH 50/58] adding to changelog --- CHANGELOG.md | 1 + CPAC/pipeline/schema.py | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index be5ec4a432..14d8cbf807 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -24,6 +24,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 - Required positional parameter "wf" in input and output of `ingress_pipeconfig_paths` function, where a node to reorient templates is added to the `wf`. - Required positional parameter "orientation" to `resolve_resolution`. - Optional positional argument "cfg" to `create_lesion_preproc`. +- Allow enabling `overwrite_transform` only when the registration method is `ANTS`. ### Changed diff --git a/CPAC/pipeline/schema.py b/CPAC/pipeline/schema.py index 693ffbaf77..56a4c54fc6 100644 --- a/CPAC/pipeline/schema.py +++ b/CPAC/pipeline/schema.py @@ -1402,7 +1402,7 @@ def schema(config_dict): ): raise ExclusiveInvalid( "[!] Overwrite transform is found same as the anatomical registration method! " - "No need to overwrite transform with the same registration method." + "No need to overwrite transform with the same registration method. 
Please turn it off or use a different registration method."
             )
     except KeyError:
         pass
     try:

From a91c8542bf49f2320a997f85ecd6dec0c29f5685 Mon Sep 17 00:00:00 2001
From: "birajstha:construction_worker::penguin"
Date: Fri, 7 Mar 2025 11:39:45 -0500
Subject: [PATCH 51/58] fixing test for overwrite transform

---
 CPAC/pipeline/test/test_schema_validation.py | 5 ++---
 1 file changed, 2 insertions(+), 3 deletions(-)

diff --git a/CPAC/pipeline/test/test_schema_validation.py b/CPAC/pipeline/test/test_schema_validation.py
index 02362ed0f2..18503efeb9 100644
--- a/CPAC/pipeline/test/test_schema_validation.py
+++ b/CPAC/pipeline/test/test_schema_validation.py
@@ -128,6 +128,7 @@ def test_pipeline_name():
 def test_overwrite_transform(registration_using):
     """Test the case where the overwrite transform method is already a registration method."""
     # pylint: disable=invalid-name
+
     d = {
@@ -141,6 +142,4 @@ def test_overwrite_transform(registration_using):
     else:
         with pytest.raises(ExclusiveInvalid) as e:
             Configuration(d)
-        assert "Overwrite transform is found same as the registration method" in str(
-            e.value
-        )
+        assert "Overwrite transform is found same" in str(e.value)

From 01cc82227eb274688dab5151bd21699fed08945e Mon Sep 17 00:00:00 2001
From: "birajstha:construction_worker::penguin"
Date: Fri, 7 Mar 2025 11:39:55 -0500
Subject: [PATCH 52/58] fixing test for overwrite transform

---
 CPAC/pipeline/test/test_schema_validation.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/CPAC/pipeline/test/test_schema_validation.py b/CPAC/pipeline/test/test_schema_validation.py
index 18503efeb9..7fd1d86103 100644
--- a/CPAC/pipeline/test/test_schema_validation.py
+++ b/CPAC/pipeline/test/test_schema_validation.py
@@ -128,7 +128,7 @@ def test_pipeline_name():
 def test_overwrite_transform(registration_using):
     """Test the case where the overwrite transform method is already a registration method."""
     # pylint: disable=invalid-name
-
+
     d = {

From 2388a1b00507d667a36ed86d1225829485ee70d1 Mon Sep 17 00:00:00 2001
From: birajstha <111654544+birajstha@users.noreply.github.com>
Date: Mon, 10 Mar 2025 22:14:15 -0500
Subject: [PATCH 53/58] Update CPAC/pipeline/schema.py

Co-authored-by: Jon Cluce
---
 CPAC/pipeline/schema.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/CPAC/pipeline/schema.py b/CPAC/pipeline/schema.py
index 56a4c54fc6..989ccb1ea0 100644
--- a/CPAC/pipeline/schema.py
+++ b/CPAC/pipeline/schema.py
@@ -1401,7 +1401,7 @@ def schema(config_dict):
             ]["registration"]["using"]
         ):
             raise ExclusiveInvalid(
-                "[!] Overwrite transform is found same as the anatomical registration method! "
+                "[!] Overwrite transform method is the same as the anatomical registration method! "
                 "No need to overwrite transform with the same registration method. Please turn it off or use a different registration method."
) except KeyError: From 9396a0c2b47a41df5d4c021647e7378d13bf5b38 Mon Sep 17 00:00:00 2001 From: birajstha <111654544+birajstha@users.noreply.github.com> Date: Mon, 10 Mar 2025 22:14:31 -0500 Subject: [PATCH 54/58] Update CPAC/pipeline/test/test_schema_validation.py Co-authored-by: Jon Cluce --- CPAC/pipeline/test/test_schema_validation.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CPAC/pipeline/test/test_schema_validation.py b/CPAC/pipeline/test/test_schema_validation.py index 7fd1d86103..0b5e20da3f 100644 --- a/CPAC/pipeline/test/test_schema_validation.py +++ b/CPAC/pipeline/test/test_schema_validation.py @@ -142,4 +142,4 @@ def test_overwrite_transform(registration_using): else: with pytest.raises(ExclusiveInvalid) as e: Configuration(d) - assert "Overwrite transform is found same" in str(e.value) + assert "Overwrite transform method is the same" in str(e.value) From 77f4769343b96301fb2bc7148ad3d4cc4ec39e2a Mon Sep 17 00:00:00 2001 From: Jon Cluce Date: Fri, 14 Mar 2025 19:08:51 -0400 Subject: [PATCH 55/58] :construction_worker: :loud_sound: Log `git config --global url` --- .circleci/main.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/.circleci/main.yml b/.circleci/main.yml index f936f9230d..14e144a0d0 100644 --- a/.circleci/main.yml +++ b/.circleci/main.yml @@ -117,6 +117,9 @@ commands: steps: - create-docker-test-container: coverage-file: .coverage.docker${VARIANT} + - run: + name: Check Git URL Rewrite Config + command: git config --global --get-regexp url - run: name: Running pytest in Docker image command: docker exec --user $(id -u) docker_test /bin/bash /code/dev/circleci_data/test_in_image.sh From 740ad1dc33e6a648c9ee69525aa64d441c34e326 Mon Sep 17 00:00:00 2001 From: Jon Cluce Date: Sat, 15 Mar 2025 14:25:01 -0400 Subject: [PATCH 56/58] :construction_worker: :whale: :wrench: :octocat: :lock: Don't force SSH when `git clone`ing test data in test image container --- .circleci/main.yml | 7 ------- dev/circleci_data/test_in_image.sh | 3 +++ 2 files changed, 3 insertions(+), 7 deletions(-) diff --git a/.circleci/main.yml b/.circleci/main.yml index 14e144a0d0..4dfc06f738 100644 --- a/.circleci/main.yml +++ b/.circleci/main.yml @@ -45,10 +45,6 @@ commands: - run: name: "Configuring git user" command: | - sudo apt-key adv --recv-keys --keyserver keyserver.ubuntu.com 78BD65473CB3BD13 - curl -L https://packagecloud.io/circleci/trusty/gpgkey | sudo apt-key add - - sudo apt-get update - sudo apt-get install git openssh-client -y git config --global user.email "CMI_CPAC_Support@childmind.org" git config --global user.name "Theodore (machine user) @ CircleCI" create-docker-test-container: @@ -117,9 +113,6 @@ commands: steps: - create-docker-test-container: coverage-file: .coverage.docker${VARIANT} - - run: - name: Check Git URL Rewrite Config - command: git config --global --get-regexp url - run: name: Running pytest in Docker image command: docker exec --user $(id -u) docker_test /bin/bash /code/dev/circleci_data/test_in_image.sh diff --git a/dev/circleci_data/test_in_image.sh b/dev/circleci_data/test_in_image.sh index b62de84994..d03b6e8015 100755 --- a/dev/circleci_data/test_in_image.sh +++ b/dev/circleci_data/test_in_image.sh @@ -1,5 +1,8 @@ export PATH=$PATH:/home/$(whoami)/.local/bin +# don't force SSH for git clones in testing image +git config --global --unset url.ssh://git@github.com.insteadof + # install testing requirements pip install -r /code/dev/circleci_data/requirements.txt From 30dfc6fcffa3d08ecf5447d39b606e5b7b7a2142 Mon Sep 17 00:00:00 2001 
From: Jon Cluce Date: Sat, 15 Mar 2025 15:46:24 -0400 Subject: [PATCH 57/58] :truck: Move global fixtures to own file --- CPAC/_global_fixtures.py | 34 ++++++++++++++++++++++++++++++++++ CPAC/conftest.py | 17 ++--------------- dev/circleci_data/conftest.py | 2 +- 3 files changed, 37 insertions(+), 16 deletions(-) create mode 100644 CPAC/_global_fixtures.py diff --git a/CPAC/_global_fixtures.py b/CPAC/_global_fixtures.py new file mode 100644 index 0000000000..7b765736ee --- /dev/null +++ b/CPAC/_global_fixtures.py @@ -0,0 +1,34 @@ +# Copyright (C) 2025 C-PAC Developers + +# This file is part of C-PAC. + +# C-PAC is free software: you can redistribute it and/or modify it under +# the terms of the GNU Lesser General Public License as published by the +# Free Software Foundation, either version 3 of the License, or (at your +# option) any later version. + +# C-PAC is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public +# License for more details. + +# You should have received a copy of the GNU Lesser General Public +# License along with C-PAC. If not, see . +"""Global fixtures for C-PAC tests.""" + +from pathlib import Path + +from _pytest.tmpdir import TempPathFactory +from git import Repo +import pytest + + +@pytest.fixture(scope="session") +def bids_examples(tmp_path_factory: TempPathFactory) -> Path: + """Get the BIDS examples dataset.""" + example_dir = tmp_path_factory.mktemp("bids-examples") + if not example_dir.exists() or not any(example_dir.iterdir()): + Repo.clone_from( + "https://github.com/bids-standard/bids-examples.git", str(example_dir) + ) + return example_dir diff --git a/CPAC/conftest.py b/CPAC/conftest.py index 7b765736ee..330489ce0d 100644 --- a/CPAC/conftest.py +++ b/CPAC/conftest.py @@ -16,19 +16,6 @@ # License along with C-PAC. If not, see . """Global fixtures for C-PAC tests.""" -from pathlib import Path +from CPAC._global_fixtures import bids_examples -from _pytest.tmpdir import TempPathFactory -from git import Repo -import pytest - - -@pytest.fixture(scope="session") -def bids_examples(tmp_path_factory: TempPathFactory) -> Path: - """Get the BIDS examples dataset.""" - example_dir = tmp_path_factory.mktemp("bids-examples") - if not example_dir.exists() or not any(example_dir.iterdir()): - Repo.clone_from( - "https://github.com/bids-standard/bids-examples.git", str(example_dir) - ) - return example_dir +__all__ = ["bids_examples"] diff --git a/dev/circleci_data/conftest.py b/dev/circleci_data/conftest.py index ba239b2b4f..4966b986c5 100644 --- a/dev/circleci_data/conftest.py +++ b/dev/circleci_data/conftest.py @@ -16,4 +16,4 @@ # License along with C-PAC. If not, see . 
"""Global fixtures for C-PAC tests.""" -from CPAC.conftest import * # noqa: F403 +from CPAC._global_fixtures import * # noqa: F403 From 766bfd48485989dea5fdecad06abe48bc77e6c05 Mon Sep 17 00:00:00 2001 From: Jon Cluce Date: Sat, 15 Mar 2025 16:21:41 -0400 Subject: [PATCH 58/58] :white_check_mark: Adjust paths for updated test --- .../data_settings_bids_examples_ds051_default_BIDS.yml | 4 ++-- dev/circleci_data/test_external_utils.py | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/dev/circleci_data/data_settings_bids_examples_ds051_default_BIDS.yml b/dev/circleci_data/data_settings_bids_examples_ds051_default_BIDS.yml index 5449692350..c196250ac8 100644 --- a/dev/circleci_data/data_settings_bids_examples_ds051_default_BIDS.yml +++ b/dev/circleci_data/data_settings_bids_examples_ds051_default_BIDS.yml @@ -15,7 +15,7 @@ dataFormat: BIDS # BIDS Data Format only. # # This should be the path to the overarching directory containing the entire dataset. -bidsBaseDir: ./bids-examples/ds051 +bidsBaseDir: ./ds051 # File Path Template for Anatomical Files @@ -49,7 +49,7 @@ awsCredentialsFile: None # Directory where CPAC should place data configuration files. -outputSubjectListLocation: ./dev/circleci_data +outputSubjectListLocation: /code/dev/circleci_data # A label to be appended to the generated participant list files. diff --git a/dev/circleci_data/test_external_utils.py b/dev/circleci_data/test_external_utils.py index 31f6b243da..c55e264c8b 100644 --- a/dev/circleci_data/test_external_utils.py +++ b/dev/circleci_data/test_external_utils.py @@ -94,8 +94,8 @@ def test_build_data_config(caplog, cli_runner, multiword_connector): def test_new_settings_template(bids_examples: Path, caplog, cli_runner): """Test CLI ``utils new-settings-template``.""" caplog.set_level(INFO) - os.chdir(CPAC_DIR) assert bids_examples.exists() + os.chdir(bids_examples) result = cli_runner.invoke( CPAC_main_utils.commands[