From 37fc070103f95b351280586c8e1583aaacde427f Mon Sep 17 00:00:00 2001 From: Jon Cluce Date: Wed, 13 Nov 2024 14:14:03 -0500 Subject: [PATCH 01/11] :truck: Split longitudinal into anat and func \& :rotating_light: Lint anat --- .../__init__.py | 1 + .../preproc.py} | 0 CPAC/longitudinal/wf/__init__.py | 25 + CPAC/longitudinal/wf/anat.py | 562 +++++++++++++++ .../wf/func.py} | 645 +----------------- CPAC/pipeline/cpac_runner.py | 2 +- CPAC/pipeline/nodeblock.py | 9 +- 7 files changed, 600 insertions(+), 644 deletions(-) rename CPAC/{longitudinal_pipeline => longitudinal}/__init__.py (96%) rename CPAC/{longitudinal_pipeline/longitudinal_preproc.py => longitudinal/preproc.py} (100%) create mode 100644 CPAC/longitudinal/wf/__init__.py create mode 100644 CPAC/longitudinal/wf/anat.py rename CPAC/{longitudinal_pipeline/longitudinal_workflow.py => longitudinal/wf/func.py} (50%) diff --git a/CPAC/longitudinal_pipeline/__init__.py b/CPAC/longitudinal/__init__.py similarity index 96% rename from CPAC/longitudinal_pipeline/__init__.py rename to CPAC/longitudinal/__init__.py index 4545170f29..42bc68c8c5 100644 --- a/CPAC/longitudinal_pipeline/__init__.py +++ b/CPAC/longitudinal/__init__.py @@ -18,6 +18,7 @@ from CPAC.utils.docs import DOCS_URL_PREFIX +assert isinstance(__doc__, str) __doc__ += f""" See {DOCS_URL_PREFIX}/user/longitudinal diff --git a/CPAC/longitudinal_pipeline/longitudinal_preproc.py b/CPAC/longitudinal/preproc.py similarity index 100% rename from CPAC/longitudinal_pipeline/longitudinal_preproc.py rename to CPAC/longitudinal/preproc.py diff --git a/CPAC/longitudinal/wf/__init__.py b/CPAC/longitudinal/wf/__init__.py new file mode 100644 index 0000000000..990d1541f5 --- /dev/null +++ b/CPAC/longitudinal/wf/__init__.py @@ -0,0 +1,25 @@ +# Copyright (C) 2024 C-PAC Developers + +# This file is part of C-PAC. + +# C-PAC is free software: you can redistribute it and/or modify it under +# the terms of the GNU Lesser General Public License as published by the +# Free Software Foundation, either version 3 of the License, or (at your +# option) any later version. + +# C-PAC is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public +# License for more details. + +# You should have received a copy of the GNU Lesser General Public +# License along with C-PAC. If not, see . +"""Workflows for longitudinal preprocessing.""" + +from CPAC.utils.docs import DOCS_URL_PREFIX + +assert isinstance(__doc__, str) +__doc__ += f""" + +See {DOCS_URL_PREFIX}/user/longitudinal +""" # noqa: A001 diff --git a/CPAC/longitudinal/wf/anat.py b/CPAC/longitudinal/wf/anat.py new file mode 100644 index 0000000000..a60f593146 --- /dev/null +++ b/CPAC/longitudinal/wf/anat.py @@ -0,0 +1,562 @@ +# -*- coding: utf-8 -*- +# Copyright (C) 2020-2024 C-PAC Developers + +# This file is part of C-PAC. + +# C-PAC is free software: you can redistribute it and/or modify it under +# the terms of the GNU Lesser General Public License as published by the +# Free Software Foundation, either version 3 of the License, or (at your +# option) any later version. + +# C-PAC is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public +# License for more details. + +# You should have received a copy of the GNU Lesser General Public +# License along with C-PAC. 
If not, see . +"""Longitudinal workflows for anatomical data.""" + +import os +from typing import Optional + +from nipype.interfaces import fsl + +from CPAC.longitudinal.preproc import subject_specific_template +from CPAC.pipeline import nipype_pipeline_engine as pe +from CPAC.pipeline.cpac_pipeline import ( + build_anat_preproc_stack, + build_segmentation_stack, + build_T1w_registration_stack, + connect_pipeline, + initialize_nipype_wf, +) +from CPAC.pipeline.engine import ingress_output_dir, initiate_rpool +from CPAC.pipeline.nodeblock import nodeblock, NODEBLOCK_RETURN +from CPAC.registration.registration import apply_transform +from CPAC.utils.configuration import Configuration +from CPAC.utils.interfaces.function import Function +from CPAC.utils.utils import check_prov_for_regtool + + +@nodeblock( + name="mask_T1w_longitudinal_template", + config=["longitudinal_template_generation"], + switch=["run"], + inputs=["desc-brain_T1w"], + outputs=["space-T1w_desc-brain_mask"], +) +def mask_T1w_longitudinal_template( + wf: pe.Workflow, cfg, strat_pool, pipe_num, opt=None +) -> NODEBLOCK_RETURN: + """Create a native-space brain mask for longitudinal template generation.""" + brain_mask = pe.Node( + interface=fsl.maths.MathsCommand(), + name=f"longitudinal_anatomical_brain_mask_{pipe_num}", + ) + brain_mask.inputs.args = "-bin" + + node, out = strat_pool.get_data("desc-brain_T1w") + wf.connect(node, out, brain_mask, "in_file") + + outputs = {"space-T1w_desc-brain_mask": (brain_mask, "out_file")} + + return wf, outputs + + +def pick_map( + file_list: list[list[str]] | list[str], index: str, file_type: str +) -> Optional[str]: + """Choose a file from a list of files.""" + if isinstance(file_list, list): + if len(file_list) == 1 and isinstance(file_list[0], list): + file_list = file_list[0] + for file_name in file_list: + assert isinstance(file_name, str) + if file_name.endswith(f"{file_type}_{index}.nii.gz"): + return file_name + return None + + +def select_session( + session: str, output_brains: list[str], warps: list[str] +) -> tuple[Optional[str], Optional[str]]: + """Select output brain image and warp for given session.""" + brain_path = None + warp_path = None + for brain_path in output_brains: + if f"{session}_" in brain_path: + break + for warp_path in warps: + if f"{session}_" in warp_path: + break + return brain_path, warp_path + + +@nodeblock( + name="mask_longitudinal_T1w_brain", + config=["longitudinal_template_generation"], + switch=["run"], + inputs=["space-longitudinal_desc-brain_T1w"], + outputs=["space-longitudinal_desc-brain_mask"], +) +def mask_longitudinal_T1w_brain( + wf, cfg, strat_pool, pipe_num, opt=None +) -> NODEBLOCK_RETURN: + """Create brain mask for longitudinal T1w image.""" + brain_mask = pe.Node( + interface=fsl.maths.MathsCommand(), + name=f"longitudinal_T1w_brain_mask_{pipe_num}", + ) + brain_mask.inputs.args = "-bin" + + node, out = strat_pool.get_data("space-longitudinal_desc-brain_T1w") + wf.connect(node, out, brain_mask, "in_file") + + outputs = {"space-longitudinal_desc-brain_mask": (brain_mask, "out_file")} + + return (wf, outputs) + + +@nodeblock( + name="warp_longitudinal_T1w_to_template", + config=["longitudinal_template_generation"], + switch=["run"], + inputs=[ + ( + "space-longitudinal_desc-brain_T1w", + "from-longitudinal_to-template_mode-image_xfm", + ) + ], + outputs=["space-template_desc-brain_T1w"], +) +def warp_longitudinal_T1w_to_template( + wf, cfg, strat_pool, pipe_num, opt=None +) -> NODEBLOCK_RETURN: + """Transform longitudinal T1w images 
to template space."""
+    xfm_prov = strat_pool.get_cpac_provenance(
+        "from-longitudinal_to-template_mode-image_xfm"
+    )
+    reg_tool = check_prov_for_regtool(xfm_prov)
+
+    num_cpus = cfg.pipeline_setup["system_config"]["max_cores_per_participant"]
+
+    num_ants_cores = cfg.pipeline_setup["system_config"]["num_ants_threads"]
+
+    apply_xfm = apply_transform(
+        f"warp_longitudinal_to_T1template_{pipe_num}",
+        reg_tool,
+        time_series=False,
+        num_cpus=num_cpus,
+        num_ants_cores=num_ants_cores,
+    )
+
+    if reg_tool == "ants":
+        apply_xfm.inputs.inputspec.interpolation = cfg.registration_workflows[
+            "anatomical_registration"
+        ]["registration"]["ANTs"]["interpolation"]
+    elif reg_tool == "fsl":
+        apply_xfm.inputs.inputspec.interpolation = cfg.registration_workflows[
+            "anatomical_registration"
+        ]["registration"]["FSL-FNIRT"]["interpolation"]
+
+    node, out = strat_pool.get_data("space-longitudinal_desc-brain_T1w")
+    wf.connect(node, out, apply_xfm, "inputspec.input_image")
+
+    node, out = strat_pool.get_data("T1w_brain_template")
+    wf.connect(node, out, apply_xfm, "inputspec.reference")
+
+    node, out = strat_pool.get_data("from-longitudinal_to-template_mode-image_xfm")
+    wf.connect(node, out, apply_xfm, "inputspec.transform")
+
+    outputs = {"space-template_desc-brain_T1w": (apply_xfm, "outputspec.output_image")}
+
+    return (wf, outputs)
+
+
+@nodeblock(
+    name="warp_longitudinal_seg_to_T1w",
+    config=["longitudinal_template_generation"],
+    switch=["run"],
+    inputs=[
+        (
+            "from-longitudinal_to-T1w_mode-image_desc-linear_xfm",
+            "space-longitudinal_label-CSF_mask",
+            "space-longitudinal_label-GM_mask",
+            "space-longitudinal_label-WM_mask",
+            "space-longitudinal_label-CSF_desc-preproc_mask",
+            "space-longitudinal_label-GM_desc-preproc_mask",
+            "space-longitudinal_label-WM_desc-preproc_mask",
+            "space-longitudinal_label-CSF_probseg",
+            "space-longitudinal_label-GM_probseg",
+            "space-longitudinal_label-WM_probseg",
+        )
+    ],
+    outputs=[
+        "label-CSF_mask",
+        "label-GM_mask",
+        "label-WM_mask",
+        "label-CSF_desc-preproc_mask",
+        "label-GM_desc-preproc_mask",
+        "label-WM_desc-preproc_mask",
+        "label-CSF_probseg",
+        "label-GM_probseg",
+        "label-WM_probseg",
+    ],
+)
+def warp_longitudinal_seg_to_T1w(
+    wf, cfg, strat_pool, pipe_num, opt=None
+) -> NODEBLOCK_RETURN:
+    """Transform segmentations from longitudinal space to T1w space."""
+    xfm_prov = strat_pool.get_cpac_provenance(
+        "from-longitudinal_to-T1w_mode-image_desc-linear_xfm"
+    )
+    reg_tool = check_prov_for_regtool(xfm_prov)
+
+    num_cpus = cfg.pipeline_setup["system_config"]["max_cores_per_participant"]
+
+    num_ants_cores = cfg.pipeline_setup["system_config"]["num_ants_threads"]
+
+    outputs = {}
+
+    labels = [
+        "CSF_mask",
+        "CSF_desc-preproc_mask",
+        "CSF_probseg",
+        "GM_mask",
+        "GM_desc-preproc_mask",
+        "GM_probseg",
+        "WM_mask",
+        "WM_desc-preproc_mask",
+        "WM_probseg",
+    ]
+
+    for label in labels:
+        apply_xfm = apply_transform(
+            f"warp_longitudinal_seg_to_T1w_{label}_{pipe_num}",
+            reg_tool,
+            time_series=False,
+            num_cpus=num_cpus,
+            num_ants_cores=num_ants_cores,
+        )
+
+        if reg_tool == "ants":
+            apply_xfm.inputs.inputspec.interpolation = cfg.registration_workflows[
+                "anatomical_registration"
+            ]["registration"]["ANTs"]["interpolation"]
+        elif reg_tool == "fsl":
+            apply_xfm.inputs.inputspec.interpolation = cfg.registration_workflows[
+                "anatomical_registration"
+            ]["registration"]["FSL-FNIRT"]["interpolation"]
+
+        node, out = strat_pool.get_data("space-longitudinal_desc-brain_T1w")
+        wf.connect(node, out, apply_xfm, 
"inputspec.input_image") + + node, out = strat_pool.get_data("T1w_brain_template") + wf.connect(node, out, apply_xfm, "inputspec.reference") + + node, out = strat_pool.get_data("from-longitudinal_to-template_mode-image_xfm") + wf.connect(node, out, apply_xfm, "inputspec.transform") + + outputs[f"label-{label}"] = (apply_xfm, "outputspec.output_image") + + return (wf, outputs) + + +def anat_longitudinal_wf( + subject_id: str, sub_list: list[dict], config: Configuration +) -> None: + """ + Create and run longitudinal workflows for anatomical data. + + Parameters + ---------- + subject_id + the id of the subject + sub_list + a list of sessions for one subject + config + a Configuration object containing the information for the participant pipeline + """ + # list of lists for every strategy + session_id_list = [] + session_wfs = {} + + cpac_dirs = [] + out_dir = config.pipeline_setup["output_directory"]["path"] + + orig_pipe_name = config.pipeline_setup["pipeline_name"] + + # Loop over the sessions to create the input for the longitudinal + # algorithm + for session in sub_list: + unique_id = session["unique_id"] + session_id_list.append(unique_id) + + try: + creds_path = session["creds_path"] + if creds_path and "none" not in creds_path.lower(): + if os.path.exists(creds_path): + input_creds_path = os.path.abspath(creds_path) + else: + err_msg = ( + 'Credentials path: "%s" for subject "%s" ' + 'session "%s" was not found. Check this path ' + "and try again." % (creds_path, subject_id, unique_id) + ) + raise Exception(err_msg) + else: + input_creds_path = None + except KeyError: + input_creds_path = None + + workflow = initialize_nipype_wf( + config, + sub_list[0], + # just grab the first one for the name + name="anat_longitudinal_pre-preproc", + ) + + workflow, rpool = initiate_rpool(workflow, config, session) + pipeline_blocks = build_anat_preproc_stack(rpool, config) + workflow = connect_pipeline(workflow, config, rpool, pipeline_blocks) + + session_wfs[unique_id] = rpool + + rpool.gather_pipes(workflow, config) + + workflow.run() + + cpac_dir = os.path.join( + out_dir, f"pipeline_{orig_pipe_name}", f"{subject_id}_{unique_id}" + ) + cpac_dirs.append(os.path.join(cpac_dir, "anat")) + + # Now we have all the anat_preproc set up for every session + # loop over the different anat preproc strategies + strats_brain_dct = {} + strats_head_dct = {} + for cpac_dir in cpac_dirs: + if os.path.isdir(cpac_dir): + for filename in os.listdir(cpac_dir): + if "T1w.nii" in filename: + for tag in filename.split("_"): + if "desc-" in tag and "brain" in tag: + if tag not in strats_brain_dct: + strats_brain_dct[tag] = [] + strats_brain_dct[tag].append( + os.path.join(cpac_dir, filename) + ) + if tag not in strats_head_dct: + strats_head_dct[tag] = [] + head_file = filename.replace(tag, "desc-reorient") + strats_head_dct[tag].append( + os.path.join(cpac_dir, head_file) + ) + + for strat in strats_brain_dct.keys(): + wf = initialize_nipype_wf( + config, + sub_list[0], + # just grab the first one for the name + name=f"template_node_{strat}", + ) + + config.pipeline_setup["pipeline_name"] = f"longitudinal_{orig_pipe_name}" + + template_node_name = f"longitudinal_anat_template_{strat}" + + # This node will generate the longitudinal template (the functions are + # in longitudinal_preproc) + # Later other algorithms could be added to calculate it, like the + # multivariate template from ANTS + # It would just require to change it here. 
+ template_node = subject_specific_template(workflow_name=template_node_name) + + template_node.inputs.set( + avg_method=config.longitudinal_template_generation["average_method"], + dof=config.longitudinal_template_generation["dof"], + interp=config.longitudinal_template_generation["interp"], + cost=config.longitudinal_template_generation["cost"], + convergence_threshold=config.longitudinal_template_generation[ + "convergence_threshold" + ], + thread_pool=config.longitudinal_template_generation["thread_pool"], + unique_id_list=list(session_wfs.keys()), + ) + + template_node.inputs.input_brain_list = strats_brain_dct[strat] + template_node.inputs.input_skull_list = strats_head_dct[strat] + + long_id = f"longitudinal_{subject_id}_strat-{strat}" + + wf, rpool = initiate_rpool(wf, config, part_id=long_id) + + rpool.set_data( + "space-longitudinal_desc-brain_T1w", + template_node, + "brain_template", + {}, + "", + template_node_name, + ) + + rpool.set_data( + "space-longitudinal_desc-brain_T1w-template", + template_node, + "brain_template", + {}, + "", + template_node_name, + ) + + rpool.set_data( + "space-longitudinal_desc-reorient_T1w", + template_node, + "skull_template", + {}, + "", + template_node_name, + ) + + rpool.set_data( + "space-longitudinal_desc-reorient_T1w-template", + template_node, + "skull_template", + {}, + "", + template_node_name, + ) + + pipeline_blocks = [mask_longitudinal_T1w_brain] + + pipeline_blocks = build_T1w_registration_stack(rpool, config, pipeline_blocks) + + pipeline_blocks = build_segmentation_stack(rpool, config, pipeline_blocks) + + wf = connect_pipeline(wf, config, rpool, pipeline_blocks) + + excl = [ + "space-longitudinal_desc-brain_T1w", + "space-longitudinal_desc-reorient_T1w", + "space-longitudinal_desc-brain_mask", + ] + rpool.gather_pipes(wf, config, add_excl=excl) + + # this is going to run multiple times! + # once for every strategy! + wf.run() + + # now, just write out a copy of the above to each session + config.pipeline_setup["pipeline_name"] = orig_pipe_name + for session in sub_list: + unique_id = session["unique_id"] + + try: + creds_path = session["creds_path"] + if creds_path and "none" not in creds_path.lower(): + if os.path.exists(creds_path): + input_creds_path = os.path.abspath(creds_path) + else: + err_msg = ( + 'Credentials path: "%s" for subject "%s" ' + 'session "%s" was not found. Check this path ' + "and try again." 
% (creds_path, subject_id, unique_id) + ) + raise Exception(err_msg) + else: + input_creds_path = None + except KeyError: + input_creds_path = None + + wf = initialize_nipype_wf(config, sub_list[0]) + + wf, rpool = initiate_rpool(wf, config, session) + + config.pipeline_setup["pipeline_name"] = f"longitudinal_{orig_pipe_name}" + rpool = ingress_output_dir( + wf, + config, + rpool, + long_id, + data_paths=session, + part_id=subject_id, + ses_id=unique_id, + creds_path=input_creds_path, + ) + + select_node_name = f"select_{unique_id}" + select_sess = pe.Node( + Function( + input_names=["session", "output_brains", "warps"], + output_names=["brain_path", "warp_path"], + function=select_session, + ), + name=select_node_name, + ) + select_sess.inputs.session = unique_id + + wf.connect(template_node, "output_brain_list", select_sess, "output_brains") + wf.connect(template_node, "warp_list", select_sess, "warps") + + rpool.set_data( + "space-longitudinal_desc-brain_T1w", + select_sess, + "brain_path", + {}, + "", + select_node_name, + ) + + rpool.set_data( + "from-T1w_to-longitudinal_mode-image_desc-linear_xfm", + select_sess, + "warp_path", + {}, + "", + select_node_name, + ) + + config.pipeline_setup["pipeline_name"] = orig_pipe_name + excl = ["space-template_desc-brain_T1w", "space-T1w_desc-brain_mask"] + + rpool.gather_pipes(wf, config, add_excl=excl) + wf.run() + + # begin single-session stuff again + for session in sub_list: + unique_id = session["unique_id"] + + try: + creds_path = session["creds_path"] + if creds_path and "none" not in creds_path.lower(): + if os.path.exists(creds_path): + input_creds_path = os.path.abspath(creds_path) + else: + err_msg = ( + 'Credentials path: "%s" for subject "%s" ' + 'session "%s" was not found. Check this path ' + "and try again." % (creds_path, subject_id, unique_id) + ) + raise Exception(err_msg) + else: + input_creds_path = None + except KeyError: + input_creds_path = None + + wf = initialize_nipype_wf(config, sub_list[0]) + + wf, rpool = initiate_rpool(wf, config, session) + + pipeline_blocks = [ + warp_longitudinal_T1w_to_template, + warp_longitudinal_seg_to_T1w, + ] + + wf = connect_pipeline(wf, config, rpool, pipeline_blocks) + + rpool.gather_pipes(wf, config) + + # this is going to run multiple times! + # once for every strategy! + wf.run() diff --git a/CPAC/longitudinal_pipeline/longitudinal_workflow.py b/CPAC/longitudinal/wf/func.py similarity index 50% rename from CPAC/longitudinal_pipeline/longitudinal_workflow.py rename to CPAC/longitudinal/wf/func.py index 5c989675c1..67b035a67d 100644 --- a/CPAC/longitudinal_pipeline/longitudinal_workflow.py +++ b/CPAC/longitudinal/wf/func.py @@ -15,662 +15,25 @@ # You should have received a copy of the GNU Lesser General Public # License along with C-PAC. If not, see . 
+"""Longitudinal workflows for anatomical data.""" + import os -from nipype.interfaces import fsl import nipype.interfaces.io as nio -from indi_aws import aws_utils -from CPAC.longitudinal_pipeline.longitudinal_preproc import subject_specific_template +from CPAC.longitudinal.preproc import subject_specific_template from CPAC.pipeline import nipype_pipeline_engine as pe -from CPAC.pipeline.cpac_pipeline import ( - build_anat_preproc_stack, - build_segmentation_stack, - build_T1w_registration_stack, - connect_pipeline, - initialize_nipype_wf, -) -from CPAC.pipeline.engine import ingress_output_dir, initiate_rpool -from CPAC.pipeline.nodeblock import nodeblock from CPAC.registration import ( create_fsl_flirt_linear_reg, create_fsl_fnirt_nonlinear_reg, create_wf_calculate_ants_warp, ) -from CPAC.registration.registration import apply_transform from CPAC.utils.datasource import ( resolve_resolution, ) -from CPAC.utils.interfaces.datasink import DataSink from CPAC.utils.interfaces.function import Function from CPAC.utils.strategy import Strategy -from CPAC.utils.utils import check_config_resources, check_prov_for_regtool - - -@nodeblock( - name="mask_T1w_longitudinal_template", - config=["longitudinal_template_generation"], - switch=["run"], - inputs=["desc-brain_T1w"], - outputs=["space-T1w_desc-brain_mask"], -) -def mask_T1w_longitudinal_template(wf, cfg, strat_pool, pipe_num, opt=None): - brain_mask = pe.Node( - interface=fsl.maths.MathsCommand(), - name=f"longitudinal_anatomical_brain_mask_{pipe_num}", - ) - brain_mask.inputs.args = "-bin" - - node, out = strat_pool.get_data("desc-brain_T1w") - wf.connect(node, out, brain_mask, "in_file") - - outputs = {"space-T1w_desc-brain_mask": (brain_mask, "out_file")} - - return (wf, outputs) - - -def create_datasink( - datasink_name, - config, - subject_id, - session_id="", - strat_name="", - map_node_iterfield=None, -) -> pe.Node | pe.MapNode: - """ - Parameters - ---------- - datasink_name - config - subject_id - session_id - strat_name - map_node_iterfield - """ - encrypt_data = config.pipeline_setup["Amazon-AWS"]["s3_encryption"] - - # TODO Enforce value with schema validation - # Extract credentials path for output if it exists - try: - # Get path to creds file - creds_path = "" - if config.pipeline_setup["Amazon-AWS"]["aws_output_bucket_credentials"]: - creds_path = str( - config.pipeline_setup["Amazon-AWS"]["aws_output_bucket_credentials"] - ) - creds_path = os.path.abspath(creds_path) - - if ( - config.pipeline_setup["output_directory"]["path"] - .lower() - .startswith("s3://") - ): - # Test for s3 write access - s3_write_access = aws_utils.test_bucket_access( - creds_path, config.pipeline_setup["output_directory"]["path"] - ) - - if not s3_write_access: - msg = "Not able to write to bucket!" - raise Exception(msg) - - except Exception as e: - if ( - config.pipeline_setup["output_directory"]["path"] - .lower() - .startswith("s3://") - ): - err_msg = ( - "There was an error processing credentials or " - "accessing the S3 bucket. 
Check and try again.\n" - "Error: %s" % e - ) - raise Exception(err_msg) - - if map_node_iterfield is not None: - ds = pe.MapNode( - DataSink(infields=map_node_iterfield), - name=f"sinker_{datasink_name}", - iterfield=map_node_iterfield, - ) - else: - ds = pe.Node(DataSink(), name=f"sinker_{datasink_name}") - - ds.inputs.base_directory = config.pipeline_setup["output_directory"]["path"] - ds.inputs.creds_path = creds_path - ds.inputs.encrypt_bucket_keys = encrypt_data - ds.inputs.container = os.path.join( - "pipeline_%s_%s" % (config.pipeline_setup["pipeline_name"], strat_name), - subject_id, - session_id, - ) - return ds - - -def connect_anat_preproc_inputs( - strat, anat_preproc, strat_name, strat_nodes_list_list, workflow -): - """ - Parameters - ---------- - strat : Strategy - the strategy object you want to fork - anat_preproc : Workflow - the anat_preproc workflow node to be connected and added to the resource pool - strat_name : str - name of the strategy - strat_nodes_list_list : list - a list of strat_nodes_list - workflow : Workflow - main longitudinal workflow - - Returns - ------- - new_strat : Strategy - the fork of strat with the resource pool updated - strat_nodes_list_list : list - a list of strat_nodes_list - """ - new_strat = strat.fork() - - tmp_node, out_key = new_strat["anatomical"] - workflow.connect(tmp_node, out_key, anat_preproc, "inputspec.anat") - - tmp_node, out_key = new_strat["template_cmass"] - workflow.connect(tmp_node, out_key, anat_preproc, "inputspec.template_cmass") - - new_strat.append_name(anat_preproc.name) - - new_strat.update_resource_pool( - { - "anatomical_brain": (anat_preproc, "outputspec.brain"), - "anatomical_skull_leaf": (anat_preproc, "outputspec.reorient"), - "anatomical_brain_mask": (anat_preproc, "outputspec.brain_mask"), - } - ) - - try: - strat_nodes_list_list[strat_name].append(new_strat) - except KeyError: - strat_nodes_list_list[strat_name] = [new_strat] - - return new_strat, strat_nodes_list_list - - -def pick_map(file_list, index, file_type): - if isinstance(file_list, list): - if len(file_list) == 1: - file_list = file_list[0] - for file_name in file_list: - if file_name.endswith(f"{file_type}_{index}.nii.gz"): - return file_name - return None - - -def select_session(session, output_brains, warps): - brain_path = None - warp_path = None - for brain_path in output_brains: - if f"{session}_" in brain_path: - break - for warp_path in warps: - if f"{session}_" in warp_path: - break - return (brain_path, warp_path) - - -@nodeblock( - name="mask_longitudinal_T1w_brain", - config=["longitudinal_template_generation"], - switch=["run"], - inputs=["space-longitudinal_desc-brain_T1w"], - outputs=["space-longitudinal_desc-brain_mask"], -) -def mask_longitudinal_T1w_brain(wf, cfg, strat_pool, pipe_num, opt=None): - brain_mask = pe.Node( - interface=fsl.maths.MathsCommand(), - name=f"longitudinal_T1w_brain_mask_{pipe_num}", - ) - brain_mask.inputs.args = "-bin" - - node, out = strat_pool.get_data("space-longitudinal_desc-brain_T1w") - wf.connect(node, out, brain_mask, "in_file") - - outputs = {"space-longitudinal_desc-brain_mask": (brain_mask, "out_file")} - - return (wf, outputs) - - -@nodeblock( - name="warp_longitudinal_T1w_to_template", - config=["longitudinal_template_generation"], - switch=["run"], - inputs=[ - ( - "space-longitudinal_desc-brain_T1w", - "from-longitudinal_to-template_mode-image_xfm", - ) - ], - outputs=["space-template_desc-brain_T1w"], -) -def warp_longitudinal_T1w_to_template(wf, cfg, strat_pool, pipe_num, opt=None): - 
xfm_prov = strat_pool.get_cpac_provenance( - "from-longitudinal_to-template_mode-image_xfm" - ) - reg_tool = check_prov_for_regtool(xfm_prov) - - num_cpus = cfg.pipeline_setup["system_config"]["max_cores_per_participant"] - - num_ants_cores = cfg.pipeline_setup["system_config"]["num_ants_threads"] - - apply_xfm = apply_transform( - f"warp_longitudinal_to_T1template_{pipe_num}", - reg_tool, - time_series=False, - num_cpus=num_cpus, - num_ants_cores=num_ants_cores, - ) - - if reg_tool == "ants": - apply_xfm.inputs.inputspec.interpolation = cfg.registration_workflows[ - "anatomical_registration" - ]["registration"]["ANTs"]["interpolation"] - elif reg_tool == "fsl": - apply_xfm.inputs.inputspec.interpolation = cfg.registration_workflows[ - "anatomical_registration" - ]["registration"]["FSL-FNIRT"]["interpolation"] - - node, out = strat_pool.get_data("space-longitudinal_desc-brain_T1w") - wf.connect(node, out, apply_xfm, "inputspec.input_image") - - node, out = strat_pool.get_data("T1w_brain_template") - wf.connect(node, out, apply_xfm, "inputspec.reference") - - node, out = strat_pool.get_data("from-longitudinal_to-template_mode-image_xfm") - wf.connect(node, out, apply_xfm, "inputspec.transform") - - outputs = {"space-template_desc-brain_T1w": (apply_xfm, "outputspec.output_image")} - - return (wf, outputs) - - -@nodeblock( - name="warp_longitudinal_seg_to_T1w", - config=["longitudinal_template_generation"], - switch=["run"], - inputs=[ - ( - "from-longitudinal_to-T1w_mode-image_desc-linear_xfm", - "space-longitudinal_label-CSF_mask", - "space-longitudinal_label-GM_mask", - "space-longitudinal_label-WM_mask", - "space-longitudinal_label-CSF_desc-preproc_mask", - "space-longitudinal_label-GM_desc-preproc_mask", - "space-longitudinal_label-WM_desc-preproc_mask", - "space-longitudinal_label-CSF_probseg", - "space-longitudinal_label-GM_probseg", - "space-longitudinal_label-WM_probseg", - ) - ], - outputs=[ - "label-CSF_mask", - "label-GM_mask", - "label-WM_mask", - "label-CSF_desc-preproc_mask", - "label-GM_desc-preproc_mask", - "label-WM_desc-preproc_mask", - "label-CSF_probseg", - "label-GM_probseg", - "label-WM_probseg", - ], -) -def warp_longitudinal_seg_to_T1w(wf, cfg, strat_pool, pipe_num, opt=None): - xfm_prov = strat_pool.get_cpac_provenance( - "from-longitudinal_to-T1w_mode-image_desc-linear_xfm" - ) - reg_tool = check_prov_for_regtool(xfm_prov) - - num_cpus = cfg.pipeline_setup["system_config"]["max_cores_per_participant"] - - num_ants_cores = cfg.pipeline_setup["system_config"]["num_ants_threads"] - - outputs = {} - - labels = [ - "CSF_mask", - "CSF_desc-preproc_mask", - "CSF_probseg", - "GM_mask", - "GM_desc-preproc_mask", - "GM_probseg", - "WM_mask", - "WM_desc-preproc_mask", - "WM_probseg", - ] - - for label in labels: - apply_xfm = apply_transform( - f"warp_longitudinal_seg_to_T1w_{label}_{pipe_num}", - reg_tool, - time_series=False, - num_cpus=num_cpus, - num_ants_cores=num_ants_cores, - ) - - if reg_tool == "ants": - apply_xfm.inputs.inputspec.interpolation = cfg.registration_workflows[ - "anatomical_registration" - ]["registration"]["ANTs"]["interpolation"] - elif reg_tool == "fsl": - apply_xfm.inputs.inputspec.interpolation = cfg.registration_workflows[ - "anatomical_registration" - ]["registration"]["FSL-FNIRT"]["interpolation"] - - node, out = strat_pool.get_data("space-longitudinal_desc-brain_T1w") - wf.connect(node, out, apply_xfm, "inputspec.input_image") - - node, out = strat_pool.get_data("T1w_brain_template") - wf.connect(node, out, apply_xfm, "inputspec.reference") - - 
node, out = strat_pool.get_data("from-longitudinal_to-template_mode-image_xfm") - wf.connect(node, out, apply_xfm, "inputspec.transform") - - outputs[f"label-{label}"] = (apply_xfm, "outputspec.output_image") - - return (wf, outputs) - - -def anat_longitudinal_wf(subject_id, sub_list, config): - """ - Parameters - ---------- - subject_id : str - the id of the subject - sub_list : list of dict - this is a list of sessions for one subject and each session if the same dictionary as the one given to - prep_workflow - config : configuration - a configuration object containing the information of the pipeline config. (Same as for prep_workflow) - - Returns - ------- - None - """ - # list of lists for every strategy - session_id_list = [] - session_wfs = {} - - cpac_dirs = [] - out_dir = config.pipeline_setup["output_directory"]["path"] - - orig_pipe_name = config.pipeline_setup["pipeline_name"] - - # Loop over the sessions to create the input for the longitudinal - # algorithm - for session in sub_list: - unique_id = session["unique_id"] - session_id_list.append(unique_id) - - try: - creds_path = session["creds_path"] - if creds_path and "none" not in creds_path.lower(): - if os.path.exists(creds_path): - input_creds_path = os.path.abspath(creds_path) - else: - err_msg = ( - 'Credentials path: "%s" for subject "%s" ' - 'session "%s" was not found. Check this path ' - "and try again." % (creds_path, subject_id, unique_id) - ) - raise Exception(err_msg) - else: - input_creds_path = None - except KeyError: - input_creds_path = None - - workflow = initialize_nipype_wf( - config, - sub_list[0], - # just grab the first one for the name - name="anat_longitudinal_pre-preproc", - ) - - workflow, rpool = initiate_rpool(workflow, config, session) - pipeline_blocks = build_anat_preproc_stack(rpool, config) - workflow = connect_pipeline(workflow, config, rpool, pipeline_blocks) - - session_wfs[unique_id] = rpool - - rpool.gather_pipes(workflow, config) - - workflow.run() - - cpac_dir = os.path.join( - out_dir, f"pipeline_{orig_pipe_name}", f"{subject_id}_{unique_id}" - ) - cpac_dirs.append(os.path.join(cpac_dir, "anat")) - - # Now we have all the anat_preproc set up for every session - # loop over the different anat preproc strategies - strats_brain_dct = {} - strats_head_dct = {} - for cpac_dir in cpac_dirs: - if os.path.isdir(cpac_dir): - for filename in os.listdir(cpac_dir): - if "T1w.nii" in filename: - for tag in filename.split("_"): - if "desc-" in tag and "brain" in tag: - if tag not in strats_brain_dct: - strats_brain_dct[tag] = [] - strats_brain_dct[tag].append( - os.path.join(cpac_dir, filename) - ) - if tag not in strats_head_dct: - strats_head_dct[tag] = [] - head_file = filename.replace(tag, "desc-reorient") - strats_head_dct[tag].append( - os.path.join(cpac_dir, head_file) - ) - - for strat in strats_brain_dct.keys(): - wf = initialize_nipype_wf( - config, - sub_list[0], - # just grab the first one for the name - name=f"template_node_{strat}", - ) - - config.pipeline_setup["pipeline_name"] = f"longitudinal_{orig_pipe_name}" - - template_node_name = f"longitudinal_anat_template_{strat}" - - # This node will generate the longitudinal template (the functions are - # in longitudinal_preproc) - # Later other algorithms could be added to calculate it, like the - # multivariate template from ANTS - # It would just require to change it here. 
- template_node = subject_specific_template(workflow_name=template_node_name) - - template_node.inputs.set( - avg_method=config.longitudinal_template_generation["average_method"], - dof=config.longitudinal_template_generation["dof"], - interp=config.longitudinal_template_generation["interp"], - cost=config.longitudinal_template_generation["cost"], - convergence_threshold=config.longitudinal_template_generation[ - "convergence_threshold" - ], - thread_pool=config.longitudinal_template_generation["thread_pool"], - unique_id_list=list(session_wfs.keys()), - ) - - template_node.inputs.input_brain_list = strats_brain_dct[strat] - template_node.inputs.input_skull_list = strats_head_dct[strat] - - long_id = f"longitudinal_{subject_id}_strat-{strat}" - - wf, rpool = initiate_rpool(wf, config, part_id=long_id) - - rpool.set_data( - "space-longitudinal_desc-brain_T1w", - template_node, - "brain_template", - {}, - "", - template_node_name, - ) - - rpool.set_data( - "space-longitudinal_desc-brain_T1w-template", - template_node, - "brain_template", - {}, - "", - template_node_name, - ) - - rpool.set_data( - "space-longitudinal_desc-reorient_T1w", - template_node, - "skull_template", - {}, - "", - template_node_name, - ) - - rpool.set_data( - "space-longitudinal_desc-reorient_T1w-template", - template_node, - "skull_template", - {}, - "", - template_node_name, - ) - - pipeline_blocks = [mask_longitudinal_T1w_brain] - - pipeline_blocks = build_T1w_registration_stack(rpool, config, pipeline_blocks) - - pipeline_blocks = build_segmentation_stack(rpool, config, pipeline_blocks) - - wf = connect_pipeline(wf, config, rpool, pipeline_blocks) - - excl = [ - "space-longitudinal_desc-brain_T1w", - "space-longitudinal_desc-reorient_T1w", - "space-longitudinal_desc-brain_mask", - ] - rpool.gather_pipes(wf, config, add_excl=excl) - - # this is going to run multiple times! - # once for every strategy! - wf.run() - - # now, just write out a copy of the above to each session - config.pipeline_setup["pipeline_name"] = orig_pipe_name - for session in sub_list: - unique_id = session["unique_id"] - - try: - creds_path = session["creds_path"] - if creds_path and "none" not in creds_path.lower(): - if os.path.exists(creds_path): - input_creds_path = os.path.abspath(creds_path) - else: - err_msg = ( - 'Credentials path: "%s" for subject "%s" ' - 'session "%s" was not found. Check this path ' - "and try again." 
% (creds_path, subject_id, unique_id) - ) - raise Exception(err_msg) - else: - input_creds_path = None - except KeyError: - input_creds_path = None - - wf = initialize_nipype_wf(config, sub_list[0]) - - wf, rpool = initiate_rpool(wf, config, session) - - config.pipeline_setup["pipeline_name"] = f"longitudinal_{orig_pipe_name}" - rpool = ingress_output_dir( - config, rpool, long_id, creds_path=input_creds_path - ) - - select_node_name = f"select_{unique_id}" - select_sess = pe.Node( - Function( - input_names=["session", "output_brains", "warps"], - output_names=["brain_path", "warp_path"], - function=select_session, - ), - name=select_node_name, - ) - select_sess.inputs.session = unique_id - - wf.connect(template_node, "output_brain_list", select_sess, "output_brains") - wf.connect(template_node, "warp_list", select_sess, "warps") - - rpool.set_data( - "space-longitudinal_desc-brain_T1w", - select_sess, - "brain_path", - {}, - "", - select_node_name, - ) - - rpool.set_data( - "from-T1w_to-longitudinal_mode-image_desc-linear_xfm", - select_sess, - "warp_path", - {}, - "", - select_node_name, - ) - - config.pipeline_setup["pipeline_name"] = orig_pipe_name - excl = ["space-template_desc-brain_T1w", "space-T1w_desc-brain_mask"] - - rpool.gather_pipes(wf, config, add_excl=excl) - wf.run() - - # begin single-session stuff again - for session in sub_list: - unique_id = session["unique_id"] - - try: - creds_path = session["creds_path"] - if creds_path and "none" not in creds_path.lower(): - if os.path.exists(creds_path): - input_creds_path = os.path.abspath(creds_path) - else: - err_msg = ( - 'Credentials path: "%s" for subject "%s" ' - 'session "%s" was not found. Check this path ' - "and try again." % (creds_path, subject_id, unique_id) - ) - raise Exception(err_msg) - else: - input_creds_path = None - except KeyError: - input_creds_path = None - - wf = initialize_nipype_wf(config, sub_list[0]) - - wf, rpool = initiate_rpool(wf, config, session) - - pipeline_blocks = [ - warp_longitudinal_T1w_to_template, - warp_longitudinal_seg_to_T1w, - ] - - wf = connect_pipeline(wf, config, rpool, pipeline_blocks) - - rpool.gather_pipes(wf, config) - - # this is going to run multiple times! - # once for every strategy! - wf.run() +from CPAC.utils.utils import check_config_resources # TODO check: diff --git a/CPAC/pipeline/cpac_runner.py b/CPAC/pipeline/cpac_runner.py index e5eef08138..6dc1241036 100644 --- a/CPAC/pipeline/cpac_runner.py +++ b/CPAC/pipeline/cpac_runner.py @@ -24,7 +24,7 @@ from voluptuous.error import Invalid import yaml -from CPAC.longitudinal_pipeline.longitudinal_workflow import anat_longitudinal_wf +from CPAC.longitudinal.wf.anat import anat_longitudinal_wf from CPAC.pipeline.utils import get_shell from CPAC.utils.configuration import check_pname, Configuration, set_subject from CPAC.utils.configuration.yaml_template import upgrade_pipeline_to_1_8 diff --git a/CPAC/pipeline/nodeblock.py b/CPAC/pipeline/nodeblock.py index 53b9db1330..305119b55d 100644 --- a/CPAC/pipeline/nodeblock.py +++ b/CPAC/pipeline/nodeblock.py @@ -16,7 +16,12 @@ # License along with C-PAC. If not, see . 
"""Class and decorator for NodeBlock functions.""" -from typing import Any, Callable, Optional +from collections.abc import Mapping +from typing import Any, Callable, Optional, TypeAlias + +from nipype.pipeline import engine as pe + +NODEBLOCK_RETURN: TypeAlias = tuple[pe.Workflow, Mapping[str, tuple[pe.Node, str]]] class NodeBlockFunction: @@ -78,7 +83,7 @@ def __init__( ).rstrip() # all node block functions have this signature - def __call__(self, wf, cfg, strat_pool, pipe_num, opt=None): + def __call__(self, wf, cfg, strat_pool, pipe_num, opt=None) -> NODEBLOCK_RETURN: """ Parameters From 9d093582380e4b5fd43e963d819e87bbe45d5158 Mon Sep 17 00:00:00 2001 From: Jon Cluce Date: Wed, 13 Nov 2024 15:10:51 -0500 Subject: [PATCH 02/11] :wrench: Update config to allow choosing `mri_robust_template` for longitudinal (new default) [skip ci] --- CHANGELOG.md | 2 + CPAC/longitudinal/robust_template.py | 18 ++++++++ CPAC/longitudinal/wf/anat.py | 16 +++++-- CPAC/pipeline/schema.py | 43 ++++++++++++++----- .../configs/1.7-1.8-nesting-mappings.yml | 3 ++ .../configs/pipeline_config_blank.yml | 42 +++++++++++------- .../configs/pipeline_config_default.yml | 42 ++++++++++++------ 7 files changed, 123 insertions(+), 43 deletions(-) create mode 100644 CPAC/longitudinal/robust_template.py diff --git a/CHANGELOG.md b/CHANGELOG.md index be5ec4a432..22ef913fa9 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -24,12 +24,14 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 - Required positional parameter "wf" in input and output of `ingress_pipeconfig_paths` function, where a node to reorient templates is added to the `wf`. - Required positional parameter "orientation" to `resolve_resolution`. - Optional positional argument "cfg" to `create_lesion_preproc`. +- Added `mri_robust_template` for longitudinal template generation. ### Changed - Moved `pygraphviz` from requirements to `graphviz` optional dependencies group. - Automatically tag untagged `subject_id` and `unique_id` as `!!str` when loading data config files. - Made orientation configurable (was hard-coded as "RPI"). +- Made `mri_robust_template` default implementation for longitudinal template generation. ### Fixed diff --git a/CPAC/longitudinal/robust_template.py b/CPAC/longitudinal/robust_template.py new file mode 100644 index 0000000000..dccbbb12f5 --- /dev/null +++ b/CPAC/longitudinal/robust_template.py @@ -0,0 +1,18 @@ +# -*- coding: utf-8 -*- +# Copyright (C) 2024 C-PAC Developers + +# This file is part of C-PAC. + +# C-PAC is free software: you can redistribute it and/or modify it under +# the terms of the GNU Lesser General Public License as published by the +# Free Software Foundation, either version 3 of the License, or (at your +# option) any later version. + +# C-PAC is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public +# License for more details. + +# You should have received a copy of the GNU Lesser General Public +# License along with C-PAC. If not, see . 
+"""Create longitudinal template using ``mri_robust_template``.""" diff --git a/CPAC/longitudinal/wf/anat.py b/CPAC/longitudinal/wf/anat.py index a60f593146..94a7cb7336 100644 --- a/CPAC/longitudinal/wf/anat.py +++ b/CPAC/longitudinal/wf/anat.py @@ -43,6 +43,8 @@ name="mask_T1w_longitudinal_template", config=["longitudinal_template_generation"], switch=["run"], + option_key="using", + option_val="C-PAC legacy", inputs=["desc-brain_T1w"], outputs=["space-T1w_desc-brain_mask"], ) @@ -97,6 +99,8 @@ def select_session( name="mask_longitudinal_T1w_brain", config=["longitudinal_template_generation"], switch=["run"], + option_key="using", + option_val="C-PAC legacy", inputs=["space-longitudinal_desc-brain_T1w"], outputs=["space-longitudinal_desc-brain_mask"], ) @@ -122,6 +126,8 @@ def mask_longitudinal_T1w_brain( name="warp_longitudinal_T1w_to_template", config=["longitudinal_template_generation"], switch=["run"], + option_key="using", + option_val="C-PAC legacy", inputs=[ ( "space-longitudinal_desc-brain_T1w", @@ -178,6 +184,8 @@ def warp_longitudinal_T1w_to_template( name="warp_longitudinal_seg_to_T1w", config=["longitudinal_template_generation"], switch=["run"], + option_key="using", + option_val="C-PAC legacy", inputs=[ ( "from-longitudinal_to-T1w_mode-image_desc-linear_xfm", @@ -376,12 +384,14 @@ def anat_longitudinal_wf( template_node.inputs.set( avg_method=config.longitudinal_template_generation["average_method"], dof=config.longitudinal_template_generation["dof"], - interp=config.longitudinal_template_generation["interp"], - cost=config.longitudinal_template_generation["cost"], + interp=config.longitudinal_template_generation["legacy-specific"]["interp"], + cost=config.longitudinal_template_generation["legacy-specific"]["cost"], convergence_threshold=config.longitudinal_template_generation[ "convergence_threshold" ], - thread_pool=config.longitudinal_template_generation["thread_pool"], + thread_pool=config.longitudinal_template_generation["legacy-specific"][ + "thread_pool" + ], unique_id_list=list(session_wfs.keys()), ) diff --git a/CPAC/pipeline/schema.py b/CPAC/pipeline/schema.py index 997c6267b8..fe0cfe4505 100644 --- a/CPAC/pipeline/schema.py +++ b/CPAC/pipeline/schema.py @@ -848,22 +848,32 @@ def sanitize(filename): }, "longitudinal_template_generation": { "run": bool1_1, + "using": In({"mri_robust_template", "C-PAC legacy"}), "average_method": In({"median", "mean", "std"}), "dof": In({12, 9, 7, 6}), - "interp": In({"trilinear", "nearestneighbour", "sinc", "spline"}), - "cost": In( + "convergence_threshold": Number, + "max_iter": int, + "legacy-specific": Maybe( { - "corratio", - "mutualinfo", - "normmi", - "normcorr", - "leastsq", - "labeldiff", - "bbr", + "interp": Maybe( + In({"trilinear", "nearestneighbour", "sinc", "spline"}) + ), + "cost": Maybe( + In( + { + "corratio", + "mutualinfo", + "normmi", + "normcorr", + "leastsq", + "labeldiff", + "bbr", + } + ) + ), + "thread_pool": Maybe(int), } ), - "thread_pool": int, - "convergence_threshold": Number, }, "functional_preproc": { "run": bool1_1, @@ -1418,6 +1428,17 @@ def schema(config_dict): raise OSError(msg) from error except KeyError: pass + try: + # check for incompatible longitudinal options + lgt = partially_validated["longitudinal_template_generation"] + if lgt["using"] == "mri_robust_template": + error_msg = "{value} is not implemented for longitudinal {key} in `mri_robust_template`." 
+ for key, value in [("average_method", "std"), ("dof", 9), ("max_iter", -1)]: + if lgt[key] == value: + msg = error_msg.format(key=key, value=value) + raise NotImplementedError(msg) + except KeyError: + pass return partially_validated diff --git a/CPAC/resources/configs/1.7-1.8-nesting-mappings.yml b/CPAC/resources/configs/1.7-1.8-nesting-mappings.yml index dd83685bc1..8ad79125b0 100644 --- a/CPAC/resources/configs/1.7-1.8-nesting-mappings.yml +++ b/CPAC/resources/configs/1.7-1.8-nesting-mappings.yml @@ -322,12 +322,15 @@ longitudinal_template_dof: - dof longitudinal_template_interp: - longitudinal_template_generation + - legacy-specific - interp longitudinal_template_cost: - longitudinal_template_generation + - legacy-specific - cost longitudinal_template_thread_pool: - longitudinal_template_generation + - legacy-specific - thread_pool longitudinal_template_convergence_threshold: - longitudinal_template_generation diff --git a/CPAC/resources/configs/pipeline_config_blank.yml b/CPAC/resources/configs/pipeline_config_blank.yml index 454d8add59..e95bbeb6ae 100644 --- a/CPAC/resources/configs/pipeline_config_blank.yml +++ b/CPAC/resources/configs/pipeline_config_blank.yml @@ -14,7 +14,6 @@ pipeline_setup: # Desired orientation for the output data. "RPI", "LPI", "RAI", "LAI", "RAS", "LAS", "RPS", "LPS" desired_orientation: RPI - output_directory: # Quality control outputs @@ -1515,30 +1514,43 @@ longitudinal_template_generation: # at once. run: Off - # Freesurfer longitudinal template algorithm using FSL FLIRT + # Implementation to use + # Options: mri_robust_template, C-PAC legacy + using: mri_robust_template + # Method to average the dataset at each iteration of the template creation - # Options: median, mean or std + # Options: median, mean + # Additional option if using "C-PAC legacy": std average_method: median # Degree of freedom for FLIRT in the template creation - # Options: 12 (affine), 9 (traditional), 7 (global rescale) or 6 (rigid body) + # Options: 12 (affine), 7 (global rescale), or 6 (rigid body) + # Additional option if using "C-PAC legacy": 9 (traditional) dof: 12 - # Interpolation parameter for FLIRT in the template creation - # Options: trilinear, nearestneighbour, sinc or spline - interp: trilinear - - # Cost function for FLIRT in the template creation - # Options: corratio, mutualinfo, normmi, normcorr, leastsq, labeldiff or bbr - cost: corratio - - # Number of threads used for one run of the template generation algorithm - thread_pool: 2 - # Threshold of transformation distance to consider that the loop converged # (-1 means numpy.finfo(np.float64).eps and is the default) convergence_threshold: -1 + # Maximum iterations + # Stop after this many iterations, even if still above convergence_threshold + # Additional option if using "C-PAC legacy": -1 means loop forever until reaching convergence threshold + max_iter: 5 + + # Options for C-PAC legacy implementation that are not configurable in mri_robust_template + legacy-specific: + + # Interpolation parameter for FLIRT in the template creation + # Options: trilinear, nearestneighbour, sinc or spline + interp: + + # Cost function for FLIRT in the template creation + # Options: corratio, mutualinfo, normmi, normcorr, leastsq, labeldiff or bbr + cost: + + # Number of threads used for one run of the template generation algorithm + thread_pool: + # OUTPUTS AND DERIVATIVES # ----------------------- post_processing: diff --git a/CPAC/resources/configs/pipeline_config_default.yml b/CPAC/resources/configs/pipeline_config_default.yml 
index b7aa56c13f..e22da986de 100644 --- a/CPAC/resources/configs/pipeline_config_default.yml +++ b/CPAC/resources/configs/pipeline_config_default.yml @@ -13,6 +13,8 @@ pipeline_setup: # This string will be sanitized and used in filepaths pipeline_name: cpac-default-pipeline + # Desired orientation for the output data. "RPI", "LPI", "RAI", "LAI", "RAS", "LAS", "RPS", "LPS" + desired_orientation: RPI output_directory: # Directory where C-PAC should write out processed data, logs, and crash reports. @@ -248,30 +250,42 @@ longitudinal_template_generation: # at once. run: Off - # Freesurfer longitudinal template algorithm using FSL FLIRT + # Implementation to use + # Options: mri_robust_template, C-PAC legacy + using: mri_robust_template + # Method to average the dataset at each iteration of the template creation - # Options: median, mean or std + # Options: median, mean + # Additional option if using "C-PAC legacy": std average_method: median # Degree of freedom for FLIRT in the template creation - # Options: 12 (affine), 9 (traditional), 7 (global rescale) or 6 (rigid body) + # Options: 12 (affine), 7 (global rescale), or 6 (rigid body) + # Additional option if using "C-PAC legacy": 9 (traditional) dof: 12 - # Interpolation parameter for FLIRT in the template creation - # Options: trilinear, nearestneighbour, sinc or spline - interp: trilinear - - # Cost function for FLIRT in the template creation - # Options: corratio, mutualinfo, normmi, normcorr, leastsq, labeldiff or bbr - cost: corratio - - # Number of threads used for one run of the template generation algorithm - thread_pool: 2 - # Threshold of transformation distance to consider that the loop converged # (-1 means numpy.finfo(np.float64).eps and is the default) convergence_threshold: -1 + # Maximum iterations + # Stop after this many iterations, even if still above convergence_threshold + # Additional option if using "C-PAC legacy": -1 means loop forever until reaching convergence threshold + max_iter: 5 + + # Options for C-PAC legacy implementation that are not configurable in mri_robust_template + legacy-specific: + + # Interpolation parameter for FLIRT in the template creation + # Options: trilinear, nearestneighbour, sinc or spline + interp: trilinear + + # Cost function for FLIRT in the template creation + # Options: corratio, mutualinfo, normmi, normcorr, leastsq, labeldiff or bbr + cost: corratio + + # Number of threads used for one run of the template generation algorithm + thread_pool: 2 anatomical_preproc: From dc4c2df81b667534060976e9b2d387ae24f5d5fc Mon Sep 17 00:00:00 2001 From: Jon Cluce Date: Wed, 13 Nov 2024 17:07:40 -0500 Subject: [PATCH 03/11] :necktie: Use `max_iter` parameter for longitudinal template generation. --- CHANGELOG.md | 3 +- CPAC/longitudinal/preproc.py | 85 ++++++++++--------- CPAC/longitudinal/wf/anat.py | 5 +- CPAC/pipeline/schema.py | 77 +++++++++++------ .../configs/pipeline_config_blank.yml | 11 +-- .../configs/pipeline_config_default.yml | 11 +-- 6 files changed, 114 insertions(+), 78 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 22ef913fa9..aa598ad632 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -24,7 +24,8 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 - Required positional parameter "wf" in input and output of `ingress_pipeconfig_paths` function, where a node to reorient templates is added to the `wf`. - Required positional parameter "orientation" to `resolve_resolution`. - Optional positional argument "cfg" to `create_lesion_preproc`. 
-- Added `mri_robust_template` for longitudinal template generation.
+- `mri_robust_template` for longitudinal template generation.
+- `max_iter` parameter for longitudinal template generation.

 ### Changed

diff --git a/CPAC/longitudinal/preproc.py b/CPAC/longitudinal/preproc.py
index 9fbe31c6b5..4a4ebf9f62 100644
--- a/CPAC/longitudinal/preproc.py
+++ b/CPAC/longitudinal/preproc.py
@@ -19,7 +19,9 @@
 from collections import Counter
 from multiprocessing.dummy import Pool as ThreadPool
+from multiprocessing.pool import Pool
 import os
+from typing import Literal, Optional

 import numpy as np
 import nibabel as nib
@@ -131,27 +133,23 @@ def norm_transformation(input_mat):


 def template_convergence(
-    mat_file, mat_type="matrix", convergence_threshold=np.finfo(np.float64).eps
-):
+    mat_file: str,
+    mat_type: Literal["matrix", "ITK"] = "matrix",
+    convergence_threshold: float | np.float64 = np.finfo(np.float64).eps,
+) -> bool:
     """Check that the distance between matrices is smaller than the threshold.

     Calculate the distance between a transformation matrix and a matrix of
     no transformation.

     Parameters
     ----------
-    mat_file : str
+    mat_file
         path to an fsl flirt matrix
-    mat_type : str
-        'matrix'(default), 'ITK'
+    mat_type
         The type of matrix used to represent the transformations
-    convergence_threshold : float
-        (numpy.finfo(np.float64).eps (default)) threshold for the convergence
+    convergence_threshold
         The threshold is how different from no transformation is the
         transformation matrix.
-
-    Returns
-    -------
-    bool
     """
     if mat_type == "matrix":
         translation, oth_transform = read_mat(mat_file)
@@ -347,50 +345,51 @@ def flirt_node(in_img, output_img, output_mat):


 def template_creation_flirt(
-    input_brain_list,
-    input_skull_list,
-    init_reg=None,
-    avg_method="median",
-    dof=12,
-    interp="trilinear",
-    cost="corratio",
-    mat_type="matrix",
-    convergence_threshold=-1,
-    thread_pool=2,
-    unique_id_list=None,
-):
+    input_brain_list: list[str],
+    input_skull_list: list[str],
+    init_reg: Optional[list[pe.Node]] = None,
+    avg_method: Literal["median", "mean", "std"] = "median",
+    dof: Literal[12, 9, 7, 6] = 12,
+    interp: Literal["trilinear", "nearestneighbour", "sinc", "spline"] = "trilinear",
+    cost: Literal[
+        "corratio", "mutualinfo", "normmi", "normcorr", "leastsq", "labeldiff", "bbr"
+    ] = "corratio",
+    mat_type: Literal["matrix", "ITK"] = "matrix",
+    convergence_threshold: float | np.float64 = -1,
+    max_iter: int = 5,
+    thread_pool: int | Pool = 2,
+    unique_id_list: Optional[list[str]] = None,
+) -> tuple[str, str, list[str], list[str], list[str]]:
     """Create a temporary template from a list of images.

     Parameters
     ----------
-    input_brain_list : list of str
+    input_brain_list
         list of brain images paths
-    input_skull_list : list of str
+    input_skull_list
         list of skull images paths
-    init_reg : list of Node
+    init_reg
         (default None so no initial registration performed)
         the output of the function register_img_list with another reference
         Reuter et al. 2012 (NeuroImage) section "Improved template estimation"
         doi:10.1016/j.neuroimage.2012.02.084 uses a randomly selected image
         from the input dataset
-    avg_method : str
-        function names from numpy library such as 'median', 'mean', 'std' ...
- dof : integer (int of long) - number of transform degrees of freedom (FLIRT) (12 by default) - interp : str - ('trilinear' (default) or 'nearestneighbour' or 'sinc' or 'spline') + avg_method + function names from numpy library + dof + number of transform degrees of freedom (FLIRT) + interp final interpolation method used in reslicing - cost : str - ('mutualinfo' or 'corratio' (default) or 'normcorr' or 'normmi' or - 'leastsq' or 'labeldiff' or 'bbr') + cost cost function - mat_type : str - 'matrix'(default), 'ITK' + mat_type The type of matrix used to represent the transformations - convergence_threshold : float + convergence_threshold (numpy.finfo(np.float64).eps (default)) threshold for the convergence The threshold is how different from no transformation is the transformation matrix. + max_iter + Maximum number of iterations if transformation does not converge thread_pool : int or multiprocessing.dummy.Pool (default 2) number of threads. You can also provide a Pool so the node will be added to it to be run. @@ -496,7 +495,14 @@ def template_creation_flirt( and the loop stops when this temporary template is close enough (with a transformation distance smaller than the threshold) to all the images of the precedent iteration. """ - while not converged: + iterator = 1 + iteration = 0 + if max_iter == -1: + # make iteration < max_iter always True + iterator = 0 + iteration = -2 + while not converged and iteration < max_iter: + iteration += iterator temporary_brain_template, temporary_skull_template = create_temporary_template( input_brain_list=output_brain_list, input_skull_list=output_skull_list, @@ -628,6 +634,7 @@ def subject_specific_template( "cost", "mat_type", "convergence_threshold", + "max_iter", "thread_pool", "unique_id_list", ], diff --git a/CPAC/longitudinal/wf/anat.py b/CPAC/longitudinal/wf/anat.py index 94a7cb7336..2e4fc31581 100644 --- a/CPAC/longitudinal/wf/anat.py +++ b/CPAC/longitudinal/wf/anat.py @@ -387,8 +387,9 @@ def anat_longitudinal_wf( interp=config.longitudinal_template_generation["legacy-specific"]["interp"], cost=config.longitudinal_template_generation["legacy-specific"]["cost"], convergence_threshold=config.longitudinal_template_generation[ - "convergence_threshold" - ], + "legacy-specific" + ]["convergence_threshold"], + max_iter=config.longitudinal_template_generation["max_iter"], thread_pool=config.longitudinal_template_generation["legacy-specific"][ "thread_pool" ], diff --git a/CPAC/pipeline/schema.py b/CPAC/pipeline/schema.py index fe0cfe4505..f39722dafb 100644 --- a/CPAC/pipeline/schema.py +++ b/CPAC/pipeline/schema.py @@ -21,6 +21,7 @@ from itertools import chain, permutations import re from subprocess import CalledProcessError +from typing import Any as AnyType import numpy as np from pathvalidate import sanitize_filename @@ -851,28 +852,34 @@ def sanitize(filename): "using": In({"mri_robust_template", "C-PAC legacy"}), "average_method": In({"median", "mean", "std"}), "dof": In({12, 9, 7, 6}), - "convergence_threshold": Number, - "max_iter": int, + "max_iter": Any( + All(Number, Range(min=0, min_included=False)), In([-1, "default"]) + ), "legacy-specific": Maybe( - { - "interp": Maybe( - In({"trilinear", "nearestneighbour", "sinc", "spline"}) - ), - "cost": Maybe( - In( - { - "corratio", - "mutualinfo", - "normmi", - "normcorr", - "leastsq", - "labeldiff", - "bbr", - } - ) - ), - "thread_pool": Maybe(int), - } + Schema( + { + "convergence_threshold": Any( + All(Number, Range(min=0, max=1, min_included=False)), -1 + ), + "interp": Maybe( + 
In({"trilinear", "nearestneighbour", "sinc", "spline"}) + ), + "cost": Maybe( + In( + { + "corratio", + "mutualinfo", + "normmi", + "normcorr", + "leastsq", + "labeldiff", + "bbr", + } + ) + ), + "thread_pool": Maybe(int), + } + ) ), }, "functional_preproc": { @@ -1266,6 +1273,20 @@ def sanitize(filename): ) +def check_unimplemented( + to_check: dict[str, AnyType], k_v_pairs: list[tuple[str, AnyType]], category: str +) -> None: + """Check for unimplemented combinations in subschema. + + Raise NotImplementedError if any found. + """ + error_msg = "`{value}` is not implemented for {category} `{key}`." + for key, value in k_v_pairs: + if to_check[key] == value: + msg = error_msg.format(category=category, key=key, value=value) + raise NotImplementedError(msg) + + def schema(config_dict): """Validate a participant-analysis pipeline configuration. @@ -1432,11 +1453,15 @@ def schema(config_dict): # check for incompatible longitudinal options lgt = partially_validated["longitudinal_template_generation"] if lgt["using"] == "mri_robust_template": - error_msg = "{value} is not implemented for longitudinal {key} in `mri_robust_template`." - for key, value in [("average_method", "std"), ("dof", 9), ("max_iter", -1)]: - if lgt[key] == value: - msg = error_msg.format(key=key, value=value) - raise NotImplementedError(msg) + check_unimplemented( + lgt, + [("average_method", "std"), ("dof", 9), ("max_iter", -1)], + "longitudinal `mri_robust_template`", + ) + if lgt["using"] == "C-PAC legacy": + check_unimplemented( + lgt, [("max_iter", "default")], "C-PAC legacy longitudinal" + ) except KeyError: pass return partially_validated diff --git a/CPAC/resources/configs/pipeline_config_blank.yml b/CPAC/resources/configs/pipeline_config_blank.yml index e95bbeb6ae..d802b7a15f 100644 --- a/CPAC/resources/configs/pipeline_config_blank.yml +++ b/CPAC/resources/configs/pipeline_config_blank.yml @@ -1528,18 +1528,19 @@ longitudinal_template_generation: # Additional option if using "C-PAC legacy": 9 (traditional) dof: 12 - # Threshold of transformation distance to consider that the loop converged - # (-1 means numpy.finfo(np.float64).eps and is the default) - convergence_threshold: -1 - # Maximum iterations # Stop after this many iterations, even if still above convergence_threshold + # Additional option if using "mri_robust_template": "default" means 5 for 2 sessions, 6 for more than 2 sessions # Additional option if using "C-PAC legacy": -1 means loop forever until reaching convergence threshold - max_iter: 5 + max_iter: default # Options for C-PAC legacy implementation that are not configurable in mri_robust_template legacy-specific: + # Threshold of transformation distance to consider that the loop converged + # (-1 means numpy.finfo(np.float64).eps and is the default) + convergence_threshold: -1 + # Interpolation parameter for FLIRT in the template creation # Options: trilinear, nearestneighbour, sinc or spline interp: diff --git a/CPAC/resources/configs/pipeline_config_default.yml b/CPAC/resources/configs/pipeline_config_default.yml index e22da986de..67ad5530f9 100644 --- a/CPAC/resources/configs/pipeline_config_default.yml +++ b/CPAC/resources/configs/pipeline_config_default.yml @@ -264,18 +264,19 @@ longitudinal_template_generation: # Additional option if using "C-PAC legacy": 9 (traditional) dof: 12 - # Threshold of transformation distance to consider that the loop converged - # (-1 means numpy.finfo(np.float64).eps and is the default) - convergence_threshold: -1 - # Maximum iterations # Stop after this many 
iterations, even if still above convergence_threshold + # Additional option if using "mri_robust_template": "default" means 5 for 2 sessions, 6 for more than 2 sessions # Additional option if using "C-PAC legacy": -1 means loop forever until reaching convergence threshold - max_iter: 5 + max_iter: 6 # Options for C-PAC legacy implementation that are not configurable in mri_robust_template legacy-specific: + # Threshold of transformation distance to consider that the loop converged + # (-1 means numpy.finfo(np.float64).eps and is the default) + convergence_threshold: -1 + # Interpolation parameter for FLIRT in the template creation # Options: trilinear, nearestneighbour, sinc or spline interp: trilinear From 253ca806cb8e6a603e7e53b8761603c4f84e2df3 Mon Sep 17 00:00:00 2001 From: Jon Cluce Date: Fri, 15 Nov 2024 13:48:06 -0500 Subject: [PATCH 04/11] :twisted_rightwards_arrows: Merge changes from #2160 --- CPAC/longitudinal/wf/anat.py | 418 ++++++++++-------- CPAC/pipeline/cpac_pipeline.py | 21 +- CPAC/pipeline/cpac_runner.py | 6 +- CPAC/pipeline/engine.py | 17 +- .../pipeline/nipype_pipeline_engine/engine.py | 57 ++- CPAC/registration/registration.py | 210 ++++----- CPAC/registration/utils.py | 16 + CPAC/seg_preproc/seg_preproc.py | 27 +- 8 files changed, 438 insertions(+), 334 deletions(-) diff --git a/CPAC/longitudinal/wf/anat.py b/CPAC/longitudinal/wf/anat.py index a49386b1a9..192a50034f 100644 --- a/CPAC/longitudinal/wf/anat.py +++ b/CPAC/longitudinal/wf/anat.py @@ -18,9 +18,11 @@ """Longitudinal workflows for anatomical data.""" import os -from typing import Optional +from typing import cast, Optional +from nipype import config as nipype_config from nipype.interfaces import fsl +from nipype.interfaces.utility import Merge from CPAC.longitudinal.preproc import subject_specific_template from CPAC.pipeline import nipype_pipeline_engine as pe @@ -31,7 +33,7 @@ connect_pipeline, initialize_nipype_wf, ) -from CPAC.pipeline.engine import ingress_output_dir, initiate_rpool +from CPAC.pipeline.engine import ingress_output_dir, initiate_rpool, ResourcePool from CPAC.pipeline.nodeblock import nodeblock, NODEBLOCK_RETURN from CPAC.registration.registration import apply_transform from CPAC.utils.configuration import Configuration @@ -132,7 +134,8 @@ def mask_longitudinal_T1w_brain( ( "space-longitudinal_desc-brain_T1w", "from-longitudinal_to-template_mode-image_xfm", - ) + ), + "T1w-brain-template", ], outputs=["space-template_desc-brain_T1w"], ) @@ -169,7 +172,7 @@ def warp_longitudinal_T1w_to_template( node, out = strat_pool.get_data("space-longitudinal_desc-brain_T1w") wf.connect(node, out, apply_xfm, "inputspec.input_image") - node, out = strat_pool.get_data("T1w_brain_template") + node, out = strat_pool.get_data("T1w-brain-template") wf.connect(node, out, apply_xfm, "inputspec.reference") node, out = strat_pool.get_data("from-longitudinal_to-template_mode-image_xfm") @@ -188,7 +191,11 @@ def warp_longitudinal_T1w_to_template( option_val="C-PAC legacy", inputs=[ ( - "from-longitudinal_to-T1w_mode-image_desc-linear_xfm", + "space-longitudinal_desc-brain_T1w", + [ + "from-longitudinal_to-T1w_mode-image_desc-linear_xfm", + "from-T1w_to-longitudinal_mode-image_desc-linear_xfm", + ], "space-longitudinal_label-CSF_mask", "space-longitudinal_label-GM_mask", "space-longitudinal_label-WM_mask", @@ -198,7 +205,8 @@ def warp_longitudinal_T1w_to_template( "space-longitudinal_label-CSF_probseg", "space-longitudinal_label-GM_probseg", "space-longitudinal_label-WM_probseg", - ) + ), + "T1w-brain-template", ], 
outputs=[ "label-CSF_mask",
@@ -213,14 +221,35 @@ ], ) def warp_longitudinal_seg_to_T1w(
- wf, cfg, strat_pool, pipe_num, opt=None
+ wf: pe.Workflow,
+ cfg: Configuration,
+ strat_pool: ResourcePool,
+ pipe_num: int,
+ opt: Optional[str] = None,
 ) -> NODEBLOCK_RETURN: """Transform anatomical images from longitudinal space to template space."""
- xfm_prov = strat_pool.get_cpac_provenance(
- "from-longitudinal_to-T1w_mode-image_desc-linear_xfm"
- )
- reg_tool = check_prov_for_regtool(xfm_prov)
-
+ if strat_pool.check_rpool("from-longitudinal_to-T1w_mode-image_desc-linear_xfm"):
+ xfm_prov = strat_pool.get_cpac_provenance(
+ "from-longitudinal_to-T1w_mode-image_desc-linear_xfm"
+ )
+ reg_tool = check_prov_for_regtool(xfm_prov)
+ xfm: tuple[pe.Node, str] = strat_pool.get_data(
+ "from-longitudinal_to-T1w_mode-image_desc-linear_xfm"
+ )
+ else:
+ xfm_prov = strat_pool.get_cpac_provenance(
+ "from-T1w_to-longitudinal_mode-image_desc-linear_xfm"
+ )
+ reg_tool = check_prov_for_regtool(xfm_prov)
+ # create inverse xfm if we don't have it
+ invt = pe.Node(interface=fsl.ConvertXFM(), name="convert_xfm")
+ invt.inputs.invert_xfm = True
+ wf.connect(
+ *strat_pool.get_data("from-T1w_to-longitudinal_mode-image_desc-linear_xfm"),
+ invt,
+ "in_file",
+ )
+ xfm = (invt, "out_file")
 num_cpus = cfg.pipeline_setup["system_config"]["max_cores_per_participant"] num_ants_cores = cfg.pipeline_setup["system_config"]["num_ants_threads"]
@@ -260,11 +289,10 @@ def warp_longitudinal_seg_to_T1w( node, out = strat_pool.get_data("space-longitudinal_desc-brain_T1w") wf.connect(node, out, apply_xfm, "inputspec.input_image")
- node, out = strat_pool.get_data("T1w_brain_template")
+ node, out = strat_pool.get_data("T1w-brain-template")
 wf.connect(node, out, apply_xfm, "inputspec.reference")
- node, out = strat_pool.get_data("from-longitudinal_to-template_mode-image_xfm")
- wf.connect(node, out, apply_xfm, "inputspec.transform")
+ wf.connect(*xfm, apply_xfm, "inputspec.transform")
 outputs[f"label-{label}"] = (apply_xfm, "outputspec.output_image")
@@ -272,7 +300,7 @@ def warp_longitudinal_seg_to_T1w( def anat_longitudinal_wf(
- subject_id: str, sub_list: list[dict], config: Configuration
+ subject_id: str, sub_list: list[dict], config: Configuration, dry_run: bool = False
 ) -> None: """ Create and run longitudinal workflows for anatomical data.
@@ -285,21 +313,37 @@ a list of sessions for one subject config a Configuration object containing the information for the participant pipeline
+ dry_run
+ build the graph without running it?
""" + nipype_config.update_config( + { + "execution": { + "crashfile_format": "txt", + "stop_on_first_crash": config[ + "pipeline_setup", "system_config", "fail_fast" + ], + } + } + ) config["subject_id"] = subject_id - session_id_list: list[list] = [] + session_id_list: list[str] = [] """List of lists for every strategy""" session_wfs = {} cpac_dirs = [] - out_dir = config.pipeline_setup["output_directory"]["path"] - - orig_pipe_name = config.pipeline_setup["pipeline_name"] - - # Loop over the sessions to create the input for the longitudinal - # algorithm - for session in sub_list: - unique_id = session["unique_id"] + out_dir: str = config.pipeline_setup["output_directory"]["path"] + + orig_pipe_name: str = config.pipeline_setup["pipeline_name"] + + strats_dct: dict[str, list[tuple[pe.Node, str] | str]] = { + "desc-brain_T1w": [], + "desc-head_T1w": [], + } + for i, session in enumerate(sub_list): + # Loop over the sessions to create the input for the longitudinal algorithm + unique_id: str = session["unique_id"] + unique_id: str = str(session.get("unique_id", i)) session_id_list.append(unique_id) try: @@ -319,13 +363,12 @@ def anat_longitudinal_wf( except KeyError: input_creds_path = None - workflow = initialize_nipype_wf( + workflow: pe.Workflow = initialize_nipype_wf( config, - sub_list[0], - # just grab the first one for the name - name="anat_longitudinal_pre-preproc", + session, + name=f"anat_longitudinal_pre-preproc_{unique_id}", ) - + rpool: ResourcePool workflow, rpool = initiate_rpool(workflow, config, session) pipeline_blocks = build_anat_preproc_stack(rpool, config) workflow = connect_pipeline(workflow, config, rpool, pipeline_blocks) @@ -334,158 +377,143 @@ def anat_longitudinal_wf( rpool.gather_pipes(workflow, config) - workflow.run() - - cpac_dir = os.path.join( - out_dir, f"pipeline_{orig_pipe_name}", f"{subject_id}_{unique_id}" - ) - cpac_dirs.append(os.path.join(cpac_dir, "anat")) - - # Now we have all the anat_preproc set up for every session - # loop over the different anat preproc strategies - strats_brain_dct = {} - strats_head_dct = {} - for cpac_dir in cpac_dirs: - if os.path.isdir(cpac_dir): - for filename in os.listdir(cpac_dir): - if "T1w.nii" in filename: - for tag in filename.split("_"): - if "desc-" in tag and "brain" in tag: - if tag not in strats_brain_dct: - strats_brain_dct[tag] = [] - strats_brain_dct[tag].append( - os.path.join(cpac_dir, filename) - ) - if tag not in strats_head_dct: - strats_head_dct[tag] = [] - head_file = filename.replace(tag, "desc-reorient") - strats_head_dct[tag].append( - os.path.join(cpac_dir, head_file) - ) - - for strat in strats_brain_dct.keys(): - wf = initialize_nipype_wf( - config, - sub_list[0], - # just grab the first one for the name - name=f"template_node_{strat}", - ) + for key in strats_dct.keys(): + strats_dct[key].append(cast(tuple[pe.Node, str], rpool.get_data(key))) + if not dry_run: + workflow.run() + for key in strats_dct.keys(): # get the outputs from run-nodes + for index, data in enumerate(list(strats_dct[key])): + if isinstance(data, tuple): + strats_dct[key][index] = workflow.get_output_path(*data) + + wf = initialize_nipype_wf( + config, + sub_list[0], + # just grab the first one for the name + name="template_node_brain", + ) - config.pipeline_setup["pipeline_name"] = f"longitudinal_{orig_pipe_name}" + config.pipeline_setup["pipeline_name"] = f"longitudinal_{orig_pipe_name}" + + template_node_name = "longitudinal_anat_template_brain" + + # This node will generate the longitudinal template (the functions 
are + # in longitudinal_preproc) + # Later other algorithms could be added to calculate it, like the + # multivariate template from ANTS + # It would just require to change it here. + template_node = subject_specific_template(workflow_name=template_node_name) + + template_node.inputs.set( + avg_method=config.longitudinal_template_generation["average_method"], + dof=config.longitudinal_template_generation["dof"], + interp=config.longitudinal_template_generation["interp"], + cost=config.longitudinal_template_generation["cost"], + convergence_threshold=config.longitudinal_template_generation[ + "convergence_threshold" + ], + thread_pool=config.longitudinal_template_generation["thread_pool"], + unique_id_list=list(session_wfs.keys()), + ) - template_node_name = f"longitudinal_anat_template_{strat}" - - # This node will generate the longitudinal template (the functions are - # in longitudinal_preproc) - # Later other algorithms could be added to calculate it, like the - # multivariate template from ANTS - # It would just require to change it here. - template_node = subject_specific_template(workflow_name=template_node_name) - - template_node.inputs.set( - avg_method=config.longitudinal_template_generation["average_method"], - dof=config.longitudinal_template_generation["dof"], - interp=config.longitudinal_template_generation["legacy-specific"]["interp"], - cost=config.longitudinal_template_generation["legacy-specific"]["cost"], - convergence_threshold=config.longitudinal_template_generation[ - "legacy-specific" - ]["convergence_threshold"], - max_iter=config.longitudinal_template_generation["max_iter"], - thread_pool=config.longitudinal_template_generation["legacy-specific"][ - "thread_pool" - ], - unique_id_list=list(session_wfs.keys()), - ) + num_sessions = len(strats_dct["desc-brain_T1w"]) + merge_brains = pe.Node(Merge(num_sessions), name="merge_brains") + merge_skulls = pe.Node(Merge(num_sessions), name="merge_skulls") - template_node.inputs.input_brain_list = strats_brain_dct[strat] - template_node.inputs.input_skull_list = strats_head_dct[strat] + for i in list(range(0, num_sessions)): + wf._connect_node_or_path(merge_brains, strats_dct, "desc-brain_T1w", i) + wf._connect_node_or_path(merge_skulls, strats_dct, "desc-head_T1w", i) + wf.connect(merge_brains, "out", template_node, "input_brain_list") + wf.connect(merge_skulls, "out", template_node, "input_skull_list") - long_id = f"longitudinal_{subject_id}_strat-{strat}" + long_id = f"longitudinal_{subject_id}_strat-desc-brain_T1w" - wf, rpool = initiate_rpool(wf, config, part_id=long_id) + wf, rpool = initiate_rpool(wf, config, part_id=long_id) - rpool.set_data( - "space-longitudinal_desc-brain_T1w", - template_node, - "brain_template", - {}, - "", - template_node_name, - ) + rpool.set_data( + "space-longitudinal_desc-brain_T1w", + template_node, + "brain_template", + {}, + "", + template_node_name, + ) - rpool.set_data( - "space-longitudinal_desc-brain_T1w-template", - template_node, - "brain_template", - {}, - "", - template_node_name, - ) + rpool.set_data( + "space-longitudinal_desc-brain_T1w-template", + template_node, + "brain_template", + {}, + "", + template_node_name, + ) - rpool.set_data( - "space-longitudinal_desc-reorient_T1w", - template_node, - "skull_template", - {}, - "", - template_node_name, - ) + rpool.set_data( + "space-longitudinal_desc-reorient_T1w", + template_node, + "skull_template", + {}, + "", + template_node_name, + ) - rpool.set_data( - "space-longitudinal_desc-reorient_T1w-template", - template_node, - 
"skull_template", - {}, - "", - template_node_name, - ) + rpool.set_data( + "space-longitudinal_desc-reorient_T1w-template", + template_node, + "skull_template", + {}, + "", + template_node_name, + ) - pipeline_blocks = [mask_longitudinal_T1w_brain] + pipeline_blocks = [mask_longitudinal_T1w_brain] - pipeline_blocks = build_T1w_registration_stack(rpool, config, pipeline_blocks) + pipeline_blocks = build_T1w_registration_stack( + rpool, config, pipeline_blocks, space="longitudinal" + ) - pipeline_blocks = build_segmentation_stack(rpool, config, pipeline_blocks) + pipeline_blocks = build_segmentation_stack(rpool, config, pipeline_blocks) - wf = connect_pipeline(wf, config, rpool, pipeline_blocks) + wf = connect_pipeline(wf, config, rpool, pipeline_blocks) - excl = [ - "space-longitudinal_desc-brain_T1w", - "space-longitudinal_desc-reorient_T1w", - "space-longitudinal_desc-brain_mask", - ] - rpool.gather_pipes(wf, config, add_excl=excl) + excl = [ + "space-longitudinal_desc-brain_T1w", + "space-longitudinal_desc-reorient_T1w", + "space-longitudinal_desc-brain_mask", + ] + rpool.gather_pipes(wf, config, add_excl=excl) - # this is going to run multiple times! - # once for every strategy! + if not dry_run: wf.run() - # now, just write out a copy of the above to each session - config.pipeline_setup["pipeline_name"] = orig_pipe_name - for session in sub_list: - unique_id = session["unique_id"] - - try: - creds_path = session["creds_path"] - if creds_path and "none" not in creds_path.lower(): - if os.path.exists(creds_path): - input_creds_path = os.path.abspath(creds_path) - else: - err_msg = ( - 'Credentials path: "%s" for subject "%s" ' - 'session "%s" was not found. Check this path ' - "and try again." % (creds_path, subject_id, unique_id) - ) - raise Exception(err_msg) + # now, just write out a copy of the above to each session + config.pipeline_setup["pipeline_name"] = orig_pipe_name + for session in sub_list: + unique_id = session["unique_id"] + + try: + creds_path = session["creds_path"] + if creds_path and "none" not in creds_path.lower(): + if os.path.exists(creds_path): + input_creds_path = os.path.abspath(creds_path) else: - input_creds_path = None - except KeyError: + err_msg = ( + 'Credentials path: "%s" for subject "%s" ' + 'session "%s" was not found. Check this path ' + "and try again." 
% (creds_path, subject_id, unique_id) + ) + raise Exception(err_msg) + else: input_creds_path = None + except KeyError: + input_creds_path = None - wf = initialize_nipype_wf(config, sub_list[0]) + wf = initialize_nipype_wf(config, sub_list[0]) - wf, rpool = initiate_rpool(wf, config, session) + wf, rpool = initiate_rpool(wf, config, session, rpool=rpool) - config.pipeline_setup["pipeline_name"] = f"longitudinal_{orig_pipe_name}" + config.pipeline_setup["pipeline_name"] = f"longitudinal_{orig_pipe_name}" + if "derivatives_dir" in session: rpool = ingress_output_dir( wf, config, @@ -497,42 +525,43 @@ def anat_longitudinal_wf( creds_path=input_creds_path, ) - select_node_name = f"select_{unique_id}" - select_sess = pe.Node( - Function( - input_names=["session", "output_brains", "warps"], - output_names=["brain_path", "warp_path"], - function=select_session, - ), - name=select_node_name, - ) - select_sess.inputs.session = unique_id - - wf.connect(template_node, "output_brain_list", select_sess, "output_brains") - wf.connect(template_node, "warp_list", select_sess, "warps") - - rpool.set_data( - "space-longitudinal_desc-brain_T1w", - select_sess, - "brain_path", - {}, - "", - select_node_name, - ) + select_node_name = f"FSL_select_{unique_id}" + select_sess = pe.Node( + Function( + input_names=["session", "output_brains", "warps"], + output_names=["brain_path", "warp_path"], + function=select_session, + ), + name=select_node_name, + ) + select_sess.inputs.session = unique_id - rpool.set_data( - "from-T1w_to-longitudinal_mode-image_desc-linear_xfm", - select_sess, - "warp_path", - {}, - "", - select_node_name, - ) + wf.connect(template_node, "output_brain_list", select_sess, "output_brains") + wf.connect(template_node, "warp_list", select_sess, "warps") + + rpool.set_data( + "space-longitudinal_desc-brain_T1w", + select_sess, + "brain_path", + {}, + "", + select_node_name, + ) + + rpool.set_data( + "from-T1w_to-longitudinal_mode-image_" "desc-linear_xfm", + select_sess, + "warp_path", + {}, + "", + select_node_name, + ) - config.pipeline_setup["pipeline_name"] = orig_pipe_name - excl = ["space-template_desc-brain_T1w", "space-T1w_desc-brain_mask"] + config.pipeline_setup["pipeline_name"] = orig_pipe_name + excl = ["space-template_desc-brain_T1w", "space-T1w_desc-brain_mask"] - rpool.gather_pipes(wf, config, add_excl=excl) + rpool.gather_pipes(wf, config, add_excl=excl) + if not dry_run: wf.run() # begin single-session stuff again @@ -571,4 +600,5 @@ def anat_longitudinal_wf( # this is going to run multiple times! # once for every strategy! 
- wf.run() + if not dry_run: + wf.run() diff --git a/CPAC/pipeline/cpac_pipeline.py b/CPAC/pipeline/cpac_pipeline.py index 26f67c970f..ae0a52121b 100644 --- a/CPAC/pipeline/cpac_pipeline.py +++ b/CPAC/pipeline/cpac_pipeline.py @@ -25,6 +25,7 @@ import sys import time from time import strftime +from typing import Literal, Optional import yaml import nipype @@ -130,7 +131,7 @@ # pylint: disable=wrong-import-order from CPAC.pipeline import nipype_pipeline_engine as pe from CPAC.pipeline.check_outputs import check_outputs -from CPAC.pipeline.engine import initiate_rpool, NodeBlock +from CPAC.pipeline.engine import initiate_rpool, NodeBlock, ResourcePool from CPAC.pipeline.nipype_pipeline_engine.plugins import ( LegacyMultiProcPlugin, MultiProcPlugin, @@ -162,7 +163,6 @@ warp_deriv_mask_to_EPItemplate, warp_deriv_mask_to_T1template, warp_sbref_to_T1template, - warp_T1mask_to_template, warp_timeseries_to_EPItemplate, warp_timeseries_to_T1template, warp_timeseries_to_T1template_abcd, @@ -170,7 +170,7 @@ warp_timeseries_to_T1template_deriv, warp_tissuemask_to_EPItemplate, warp_tissuemask_to_T1template, - warp_wholeheadT1_to_template, + warp_to_template, ) from CPAC.reho.reho import reho, reho_space_template from CPAC.sca.sca import dual_regression, multiple_regression, SCA_AVG @@ -1061,25 +1061,30 @@ def build_anat_preproc_stack(rpool, cfg, pipeline_blocks=None): return pipeline_blocks -def build_T1w_registration_stack(rpool, cfg, pipeline_blocks=None): +def build_T1w_registration_stack( + rpool: ResourcePool, + cfg: Configuration, + pipeline_blocks: Optional[list] = None, + space: Literal["longitudinal", "T1w"] = "T1w", +): """Build the T1w registration pipeline blocks.""" if not pipeline_blocks: pipeline_blocks = [] reg_blocks = [] - if not rpool.check_rpool("from-T1w_to-template_mode-image_xfm"): + if not rpool.check_rpool(f"from-{space}_to-template_mode-image_xfm"): reg_blocks = [ [register_ANTs_anat_to_template, register_FSL_anat_to_template], overwrite_transform_anat_to_template, - warp_wholeheadT1_to_template, - warp_T1mask_to_template, + warp_to_template("wholehead", space), + warp_to_template("mask", space), ] if not rpool.check_rpool("desc-restore-brain_T1w"): reg_blocks.append(correct_restore_brain_intensity_abcd) if cfg.voxel_mirrored_homotopic_connectivity["run"]: - if not rpool.check_rpool("from-T1w_to-symtemplate_mode-image_xfm"): + if not rpool.check_rpool(f"from-{space}_to-symtemplate_mode-image_xfm"): reg_blocks.append( [ register_symmetric_ANTs_anat_to_template, diff --git a/CPAC/pipeline/cpac_runner.py b/CPAC/pipeline/cpac_runner.py index 6dc1241036..9f09623f69 100644 --- a/CPAC/pipeline/cpac_runner.py +++ b/CPAC/pipeline/cpac_runner.py @@ -236,7 +236,7 @@ def run_cpac_on_cluster(config_file, subject_list_file, cluster_files_dir): f.write(pid) -def run_T1w_longitudinal(sublist, cfg): +def run_T1w_longitudinal(sublist, cfg: Configuration, dry_run: bool = False): subject_id_dict = {} for sub in sublist: @@ -249,7 +249,7 @@ def run_T1w_longitudinal(sublist, cfg): # sessions for each participant as value for subject_id, sub_list in subject_id_dict.items(): if len(sub_list) > 1: - anat_longitudinal_wf(subject_id, sub_list, cfg) + anat_longitudinal_wf(subject_id, sub_list, cfg, dry_run=dry_run) elif len(sub_list) == 1: warnings.warn( "\n\nThere is only one anatomical session " @@ -495,7 +495,7 @@ def run( hasattr(c, "longitudinal_template_generation") and c.longitudinal_template_generation["run"] ): - run_T1w_longitudinal(sublist, c) + run_T1w_longitudinal(sublist, c, 
dry_run=test_config) # TODO functional longitudinal pipeline """ diff --git a/CPAC/pipeline/engine.py b/CPAC/pipeline/engine.py index f99749c94b..922b34c682 100644 --- a/CPAC/pipeline/engine.py +++ b/CPAC/pipeline/engine.py @@ -1229,7 +1229,11 @@ def gather_pipes(self, wf, cfg, all=False, add_incl=None, add_excl=None): unlabelled.remove(key) # del all_forks for pipe_idx in self.rpool[resource]: - pipe_x = self.get_pipe_number(pipe_idx) + try: + pipe_x = self.get_pipe_number(pipe_idx) + except ValueError: + # already gone + continue json_info = self.rpool[resource][pipe_idx]["json"] out_dct = self.rpool[resource][pipe_idx]["out"] @@ -2623,7 +2627,14 @@ def _set_nested(attr, keys): return wf, rpool -def initiate_rpool(wf, cfg, data_paths=None, part_id=None): +def initiate_rpool( + wf: pe.Workflow, + cfg: Configuration, + data_paths=None, + part_id=None, + *, + rpool: Optional[ResourcePool] = None, +): """ Initialize a new ResourcePool. @@ -2662,7 +2673,7 @@ def initiate_rpool(wf, cfg, data_paths=None, part_id=None): unique_id = part_id creds_path = None - rpool = ResourcePool(name=unique_id, cfg=cfg) + rpool = ResourcePool(rpool=rpool.rpool if rpool else None, name=unique_id, cfg=cfg) if data_paths: # ingress outdir diff --git a/CPAC/pipeline/nipype_pipeline_engine/engine.py b/CPAC/pipeline/nipype_pipeline_engine/engine.py index 743285ae9d..78eda7dca3 100644 --- a/CPAC/pipeline/nipype_pipeline_engine/engine.py +++ b/CPAC/pipeline/nipype_pipeline_engine/engine.py @@ -8,6 +8,7 @@ # * Applies a random seed # * Supports overriding memory estimates via a log file and a buffer # * Adds quotation marks around strings in dotfiles +# * Adds methods for cross-graph connections # ORIGINAL WORK'S ATTRIBUTION NOTICE: # Copyright (c) 2009-2016, Nipype developers @@ -50,16 +51,18 @@ for Nipype's documentation. 
""" # pylint: disable=line-too-long +from collections.abc import Mapping from copy import deepcopy from inspect import Parameter, Signature, signature import os import re -from typing import Any, ClassVar, Optional +from typing import Any, ClassVar, Optional, TYPE_CHECKING from numpy import prod from traits.trait_base import Undefined from traits.trait_handlers import TraitListObject from nibabel import load +from nipype.interfaces.base.support import InterfaceResult from nipype.interfaces.utility import Function from nipype.pipeline import engine as pe from nipype.pipeline.engine.utils import ( @@ -76,6 +79,9 @@ from CPAC.utils.monitoring import getLogger, WFLOGGER +if TYPE_CHECKING: + pass + # set global default mem_gb DEFAULT_MEM_GB = 2.0 UNDEFINED_SIZE = (42, 42, 42, 1200) @@ -527,6 +533,25 @@ def __init__(self, name, base_dir=None, debug=False): self._nodes_cache = set() self._nested_workflows_cache = set() + def copy_input_connections(self, node1: pe.Node, node2: pe.Node) -> None: + """Copy input connections from ``node1`` to ``node2``.""" + new_connections: list[tuple[pe.Node, str, pe.Node, str]] = [] + for connection in self._graph.edges: + _out: pe.Node + _in: pe.Node + _out, _in = connection + if _in == node1: + details = self._graph.get_edge_data(*connection) + if "connect" in details: + for connect in details["connect"]: + new_connections.append((_out, connect[0], node2, connect[1])) + for connection in new_connections: + try: + self.connect(*connection) + except Exception: + # connection already exists + continue + def _configure_exec_nodes(self, graph): """Ensure that each node knows where to get inputs from.""" for node in graph.nodes(): @@ -565,6 +590,20 @@ def _configure_exec_nodes(self, graph): except (FileNotFoundError, KeyError, TypeError): self._handle_just_in_time_exception(node) + def _connect_node_or_path( + self, + node: pe.Node, + strats_dct: Mapping[str, list[tuple[pe.Node, str] | str]], + key: str, + index: int, + ) -> None: + """Set input appropriately for either a Node or a path string.""" + _input: str = f"in{index + 1}" + if isinstance(strats_dct[key][index], str): + setattr(node.inputs, _input, strats_dct[key][index]) + else: + self.connect(*strats_dct[key][index], node, _input) + def _get_dot( self, prefix=None, hierarchy=None, colored=False, simple_form=True, level=0 ): @@ -678,6 +717,22 @@ def _get_dot( WFLOGGER.debug("cross connection: %s", dotlist[-1]) return ("\n" + prefix).join(dotlist) + def get_output_path(self, node: pe.Node, out: str) -> str: + """Get an output path from an already-run Node.""" + try: + _run_node: pe.Node = next( + iter( + _ + for _ in self.run(updatehash=True).nodes + if _.fullname == node.fullname + ) + ) + except IndexError as index_error: + msg = f"Could not find {node.fullname} in {self}'s run Nodes." 
+ raise LookupError(msg) from index_error + _res: InterfaceResult = _run_node.run() + return getattr(_res.outputs, out) + def _handle_just_in_time_exception(self, node): # pylint: disable=protected-access if hasattr(self, "_local_func_scans"): diff --git a/CPAC/registration/registration.py b/CPAC/registration/registration.py index 1c6b6fa71a..c10a60f39d 100644 --- a/CPAC/registration/registration.py +++ b/CPAC/registration/registration.py @@ -17,7 +17,7 @@ # pylint: disable=too-many-lines,ungrouped-imports,wrong-import-order """Workflows for registration.""" -from typing import Optional +from typing import Literal, Optional, TYPE_CHECKING from voluptuous import RequiredFieldInvalid from nipype.interfaces import afni, ants, c3, fsl, utility as util @@ -26,7 +26,7 @@ from CPAC.anat_preproc.lesion_preproc import create_lesion_preproc from CPAC.func_preproc.utils import chunk_ts, split_ts_chunks from CPAC.pipeline import nipype_pipeline_engine as pe -from CPAC.pipeline.nodeblock import nodeblock +from CPAC.pipeline.nodeblock import nodeblock, NODEBLOCK_RETURN, NodeBlockFunction from CPAC.registration.utils import ( change_itk_transform_type, check_transforms, @@ -34,6 +34,7 @@ hardcoded_reg, interpolation_string, one_d_to_mat, + prepend_space, run_c3d, run_c4d, seperate_warps_list, @@ -43,6 +44,10 @@ from CPAC.utils.interfaces.fsl import Merge as fslMerge from CPAC.utils.utils import check_prov_for_motion_tool, check_prov_for_regtool +if TYPE_CHECKING: + from CPAC.pipeline.engine import ResourcePool + from CPAC.utils.configuration import Configuration + def apply_transform( wf_name, @@ -2656,14 +2661,13 @@ def register_ANTs_anat_to_template(wf, cfg, strat_pool, pipe_num, opt=None): wf.connect(node, out, ants_rc, "inputspec.lesion_mask") if "space-longitudinal" in brain: - for key in outputs: + for key in list(outputs.keys()): for direction in ["from", "to"]: if f"{direction}-T1w" in key: new_key = key.replace( f"{direction}-T1w", f"{direction}-longitudinal" ) outputs[new_key] = outputs[key] - del outputs[key] return (wf, outputs) @@ -3849,115 +3853,115 @@ def apply_blip_to_timeseries_separately(wf, cfg, strat_pool, pipe_num, opt=None) return (wf, outputs) -@nodeblock( - name="transform_whole_head_T1w_to_T1template", - config=["registration_workflows", "anatomical_registration"], - switch=["run"], - inputs=[ - ( - "desc-head_T1w", - "from-T1w_to-template_mode-image_xfm", - "space-template_desc-head_T1w", - ), - "T1w-template", - ], - outputs={"space-template_desc-head_T1w": {"Template": "T1w-template"}}, -) -def warp_wholeheadT1_to_template(wf, cfg, strat_pool, pipe_num, opt=None): - """Warp T1 head to template.""" - xfm_prov = strat_pool.get_cpac_provenance("from-T1w_to-template_mode-image_xfm") - reg_tool = check_prov_for_regtool(xfm_prov) - - num_cpus = cfg.pipeline_setup["system_config"]["max_cores_per_participant"] - - num_ants_cores = cfg.pipeline_setup["system_config"]["num_ants_threads"] - - apply_xfm = apply_transform( - f"warp_wholehead_T1w_to_T1template_{pipe_num}", - reg_tool, - time_series=False, - num_cpus=num_cpus, - num_ants_cores=num_ants_cores, - ) - - if reg_tool == "ants": - apply_xfm.inputs.inputspec.interpolation = cfg.registration_workflows[ - "functional_registration" - ]["func_registration_to_template"]["ANTs_pipelines"]["interpolation"] - elif reg_tool == "fsl": - apply_xfm.inputs.inputspec.interpolation = cfg.registration_workflows[ - "functional_registration" - ]["func_registration_to_template"]["FNIRT_pipelines"]["interpolation"] - - connect = 
strat_pool.get_data("desc-head_T1w") - node, out = connect - wf.connect(node, out, apply_xfm, "inputspec.input_image") - - node, out = strat_pool.get_data("T1w-template") - wf.connect(node, out, apply_xfm, "inputspec.reference") +def warp_to_template( + warp_what: Literal["mask", "wholehead"], space_from: Literal["longitudinal", "T1w"] +) -> NodeBlockFunction: + """Get a NodeBlockFunction to transform a resource from ``space`` to template. - node, out = strat_pool.get_data("from-T1w_to-template_mode-image_xfm") - wf.connect(node, out, apply_xfm, "inputspec.transform") - - outputs = {"space-template_desc-head_T1w": (apply_xfm, "outputspec.output_image")} + The resource being warped needs to be the first list or string in the tuple + in the first position of the decorator's "inputs". + """ + _decorators = { + "mask": { + "name": f"transform_{space_from}-mask_to_T1-template", + "switch": [ + ["registration_workflows", "anatomical_registration", "run"], + ["anatomical_preproc", "run"], + ["anatomical_preproc", "brain_extraction", "run"], + ], + "inputs": [ + ( + f"space-{space_from}_desc-brain_mask", + f"from-{space_from}_to-template_mode-image_xfm", + ), + "T1w-template", + ], + "outputs": {"space-template_desc-brain_mask": {"Template": "T1w-template"}}, + }, + "wholehead": { + "name": f"transform_wholehead_{space_from}_to_T1template", + "config": ["registration_workflows", "anatomical_registration"], + "switch": ["run"], + "inputs": [ + ( + ["desc-head_T1w", "desc-reorient_T1w"], + [ + f"from-{space_from}_to-template_mode-image_xfm", + f"from-{space_from}_to-template_mode-image_xfm", + ], + "space-template_desc-head_T1w", + ), + "T1w-template", + ], + "outputs": {"space-template_desc-head_T1w": {"Template": "T1w-template"}}, + }, + } + if space_from != "T1w": + _decorators[warp_what]["inputs"][0] = ( + prepend_space(_decorators[warp_what]["inputs"][0][0], space_from), + *_decorators[warp_what]["inputs"][0][1:], + ) - return (wf, outputs) + @nodeblock(**_decorators[warp_what]) + def warp_to_template_fxn( + wf: pe.Workflow, + cfg: "Configuration", + strat_pool: "ResourcePool", + pipe_num: int, + opt: Optional[str] = None, + ) -> NODEBLOCK_RETURN: + """Transform a resource to template space.""" + xfm_prov = strat_pool.get_cpac_provenance( + f"from-{space_from}_to-template_mode-image_xfm" + ) + reg_tool = check_prov_for_regtool(xfm_prov) + num_cpus = cfg.pipeline_setup["system_config"]["max_cores_per_participant"] -@nodeblock( - name="transform_T1mask_to_T1template", - switch=[ - ["registration_workflows", "anatomical_registration", "run"], - ["anatomical_preproc", "run"], - ["anatomical_preproc", "brain_extraction", "run"], - ], - inputs=[ - ("space-T1w_desc-brain_mask", "from-T1w_to-template_mode-image_xfm"), - "T1w-template", - ], - outputs={"space-template_desc-brain_mask": {"Template": "T1w-template"}}, -) -def warp_T1mask_to_template(wf, cfg, strat_pool, pipe_num, opt=None): - """Warp T1 mask to template.""" - xfm_prov = strat_pool.get_cpac_provenance("from-T1w_to-template_mode-image_xfm") - reg_tool = check_prov_for_regtool(xfm_prov) + num_ants_cores = cfg.pipeline_setup["system_config"]["num_ants_threads"] - num_cpus = cfg.pipeline_setup["system_config"]["max_cores_per_participant"] + apply_xfm = apply_transform( + f"warp_{space_from}{warp_what}_to_T1template_{pipe_num}", + reg_tool, + time_series=False, + num_cpus=num_cpus, + num_ants_cores=num_ants_cores, + ) - num_ants_cores = cfg.pipeline_setup["system_config"]["num_ants_threads"] + if warp_what == "mask": + 
apply_xfm.inputs.inputspec.interpolation = "NearestNeighbor" + else: + tool = ( + "ANTs" if reg_tool == "ants" else "FNIRT" if reg_tool == "fsl" else None + ) + if not tool: + msg = f"Warp {warp_what} to template not implemented for {reg_tool}." + raise NotImplementedError(msg) + apply_xfm.inputs.inputspec.interpolation = cfg.registration_workflows[ + "functional_registration" + ]["func_registration_to_template"][f"{tool}_pipelines"]["interpolation"] - apply_xfm = apply_transform( - f"warp_T1mask_to_T1template_{pipe_num}", - reg_tool, - time_series=False, - num_cpus=num_cpus, - num_ants_cores=num_ants_cores, - ) + # the resource being warped needs to be inputs[0][0] for this + node, out = strat_pool.get_data(_decorators[warp_what]["inputs"][0][0]) + wf.connect(node, out, apply_xfm, "inputspec.input_image") - apply_xfm.inputs.inputspec.interpolation = "NearestNeighbor" - """ - if reg_tool == 'ants': - apply_xfm.inputs.inputspec.interpolation = cfg.registration_workflows[ - 'functional_registration']['func_registration_to_template'][ - 'ANTs_pipelines']['interpolation'] - elif reg_tool == 'fsl': - apply_xfm.inputs.inputspec.interpolation = cfg.registration_workflows[ - 'functional_registration']['func_registration_to_template'][ - 'FNIRT_pipelines']['interpolation'] - """ - connect = strat_pool.get_data("space-T1w_desc-brain_mask") - node, out = connect - wf.connect(node, out, apply_xfm, "inputspec.input_image") + node, out = strat_pool.get_data("T1w-template") + wf.connect(node, out, apply_xfm, "inputspec.reference") - node, out = strat_pool.get_data("T1w-template") - wf.connect(node, out, apply_xfm, "inputspec.reference") + node, out = strat_pool.get_data(f"from-{space_from}_to-template_mode-image_xfm") + wf.connect(node, out, apply_xfm, "inputspec.transform") - node, out = strat_pool.get_data("from-T1w_to-template_mode-image_xfm") - wf.connect(node, out, apply_xfm, "inputspec.transform") + outputs = { + # there's only one output, so that's what we give here + next(iter(_decorators[warp_what]["outputs"].keys())): ( + apply_xfm, + "outputspec.output_image", + ) + } - outputs = {"space-template_desc-brain_mask": (apply_xfm, "outputspec.output_image")} + return wf, outputs - return (wf, outputs) + return warp_to_template_fxn @nodeblock( @@ -5416,8 +5420,8 @@ def warp_tissuemask_to_template(wf, cfg, strat_pool, pipe_num, xfm, template_spa def warp_resource_to_template( wf: pe.Workflow, - cfg, - strat_pool, + cfg: "Configuration", + strat_pool: "ResourcePool", pipe_num: int, input_resource: list[str] | str, xfm: str, diff --git a/CPAC/registration/utils.py b/CPAC/registration/utils.py index 4e0dc4421e..64bcb23884 100644 --- a/CPAC/registration/utils.py +++ b/CPAC/registration/utils.py @@ -18,6 +18,7 @@ import os import subprocess +from typing import overload import numpy as np from voluptuous import RequiredFieldInvalid @@ -808,3 +809,18 @@ def run_c4d(input_name, output_name): os.system(cmd) return output1, output2, output3 + + +@overload +def prepend_space(resource: list[str], space: str) -> list[str]: ... +@overload +def prepend_space(resource: str, space: str) -> str: ... 
+def prepend_space(resource: str | list[str], space: str) -> str | list[str]: + """Given a resource or list of resources, return same but with updated space.""" + if isinstance(resource, list): + return [prepend_space(_, space) for _ in resource] + if "space" not in resource: + return f"space-{space}_{resource}" + pre, post = resource.split("space-") + _old_space, post = post.split("_", 1) + return f"space-{space}_".join([pre, post]) diff --git a/CPAC/seg_preproc/seg_preproc.py b/CPAC/seg_preproc/seg_preproc.py index f769cf14b3..3051943314 100644 --- a/CPAC/seg_preproc/seg_preproc.py +++ b/CPAC/seg_preproc/seg_preproc.py @@ -1,4 +1,4 @@ -# Copyright (C) 2012-2023 C-PAC Developers +# Copyright (C) 2012-2024 C-PAC Developers # This file is part of C-PAC. @@ -507,27 +507,10 @@ def create_seg_preproc_antsJointLabel_method(wf_name="seg_preproc_templated_base "WM-path", ], outputs=[ - "label-CSF_mask", - "label-GM_mask", - "label-WM_mask", - "label-CSF_desc-preproc_mask", - "label-GM_desc-preproc_mask", - "label-WM_desc-preproc_mask", - "label-CSF_probseg", - "label-GM_probseg", - "label-WM_probseg", - "label-CSF_pveseg", - "label-GM_pveseg", - "label-WM_pveseg", - "space-longitudinal_label-CSF_mask", - "space-longitudinal_label-GM_mask", - "space-longitudinal_label-WM_mask", - "space-longitudinal_label-CSF_desc-preproc_mask", - "space-longitudinal_label-GM_desc-preproc_mask", - "space-longitudinal_label-WM_desc-preproc_mask", - "space-longitudinal_label-CSF_probseg", - "space-longitudinal_label-GM_probseg", - "space-longitudinal_label-WM_probseg", + f"{long}label-{tissue}_{entity}" + for long in ["", "space-longitudinal_"] + for tissue in ["CSF", "GM", "WM"] + for entity in ["mask", "desc-preproc_mask", "probseg", "pveseg"] ], ) def tissue_seg_fsl_fast(wf, cfg, strat_pool, pipe_num, opt=None): From c11c88f9ac5ebef86db580c30d7e35fb955014e3 Mon Sep 17 00:00:00 2001 From: Jon Cluce Date: Fri, 15 Nov 2024 16:28:03 -0500 Subject: [PATCH 05/11] :bug: Debug changes for longitudinal pipeline --- CPAC/longitudinal/preproc.py | 9 +++++- CPAC/longitudinal/wf/anat.py | 29 ++++++++++--------- CPAC/pipeline/cpac_pipeline.py | 21 +++++++++----- CPAC/pipeline/engine.py | 2 ++ .../pipeline/nipype_pipeline_engine/engine.py | 4 +-- CPAC/pipeline/test/test_engine.py | 20 ++++++++----- CPAC/registration/registration.py | 6 ++-- CPAC/seg_preproc/seg_preproc.py | 4 +-- CPAC/utils/interfaces/function/function.py | 4 ++- 9 files changed, 61 insertions(+), 38 deletions(-) diff --git a/CPAC/longitudinal/preproc.py b/CPAC/longitudinal/preproc.py index 4a4ebf9f62..2dc2be6d36 100644 --- a/CPAC/longitudinal/preproc.py +++ b/CPAC/longitudinal/preproc.py @@ -344,6 +344,13 @@ def flirt_node(in_img, output_img, output_mat): return node_list +@Function.sig_imports( + [ + "from multiprocessing.pool import Pool", + "from typing import Literal, Optional", + "from nipype.pipeline import engine as pe", + ] +) def template_creation_flirt( input_brain_list: list[str], input_skull_list: list[str], @@ -615,7 +622,7 @@ def subject_specific_template( "from collections import Counter", "from multiprocessing.dummy import Pool as ThreadPool", "from nipype.interfaces.fsl import ConvertXFM", - "from CPAC.longitudinal_pipeline.longitudinal_preproc import (" + "from CPAC.longitudinal.preproc import (" " create_temporary_template," " register_img_list," " template_convergence" diff --git a/CPAC/longitudinal/wf/anat.py b/CPAC/longitudinal/wf/anat.py index 192a50034f..5b6bc35a5d 100644 --- a/CPAC/longitudinal/wf/anat.py +++ 
b/CPAC/longitudinal/wf/anat.py @@ -340,10 +340,9 @@ def anat_longitudinal_wf( "desc-brain_T1w": [], "desc-head_T1w": [], } - for i, session in enumerate(sub_list): + for session in sub_list: # Loop over the sessions to create the input for the longitudinal algorithm unique_id: str = session["unique_id"] - unique_id: str = str(session.get("unique_id", i)) session_id_list.append(unique_id) try: @@ -365,8 +364,9 @@ def anat_longitudinal_wf( workflow: pe.Workflow = initialize_nipype_wf( config, - session, - name=f"anat_longitudinal_pre-preproc_{unique_id}", + subject_id, + unique_id, + name="anat_longitudinal_pre-preproc", ) rpool: ResourcePool workflow, rpool = initiate_rpool(workflow, config, session) @@ -388,8 +388,7 @@ def anat_longitudinal_wf( wf = initialize_nipype_wf( config, - sub_list[0], - # just grab the first one for the name + subject_id, name="template_node_brain", ) @@ -407,12 +406,14 @@ def anat_longitudinal_wf( template_node.inputs.set( avg_method=config.longitudinal_template_generation["average_method"], dof=config.longitudinal_template_generation["dof"], - interp=config.longitudinal_template_generation["interp"], - cost=config.longitudinal_template_generation["cost"], + interp=config.longitudinal_template_generation["legacy-specific"]["interp"], + cost=config.longitudinal_template_generation["legacy-specific"]["cost"], convergence_threshold=config.longitudinal_template_generation[ - "convergence_threshold" + "legacy-specific" + ]["convergence_threshold"], + thread_pool=config.longitudinal_template_generation["legacy-specific"][ + "thread_pool" ], - thread_pool=config.longitudinal_template_generation["thread_pool"], unique_id_list=list(session_wfs.keys()), ) @@ -467,7 +468,7 @@ def anat_longitudinal_wf( ) pipeline_blocks = [mask_longitudinal_T1w_brain] - + # breakpoint() pipeline_blocks = build_T1w_registration_stack( rpool, config, pipeline_blocks, space="longitudinal" ) @@ -508,7 +509,7 @@ def anat_longitudinal_wf( except KeyError: input_creds_path = None - wf = initialize_nipype_wf(config, sub_list[0]) + wf = initialize_nipype_wf(config, subject_id, unique_id) wf, rpool = initiate_rpool(wf, config, session, rpool=rpool) @@ -549,7 +550,7 @@ def anat_longitudinal_wf( ) rpool.set_data( - "from-T1w_to-longitudinal_mode-image_" "desc-linear_xfm", + "from-T1w_to-longitudinal_mode-image_desc-linear_xfm", select_sess, "warp_path", {}, @@ -585,7 +586,7 @@ def anat_longitudinal_wf( except KeyError: input_creds_path = None - wf = initialize_nipype_wf(config, sub_list[0]) + wf = initialize_nipype_wf(config, subject_id, unique_id) wf, rpool = initiate_rpool(wf, config, session) diff --git a/CPAC/pipeline/cpac_pipeline.py b/CPAC/pipeline/cpac_pipeline.py index ae0a52121b..00c40050be 100644 --- a/CPAC/pipeline/cpac_pipeline.py +++ b/CPAC/pipeline/cpac_pipeline.py @@ -856,14 +856,19 @@ def remove_workdir(wdpath: str) -> None: FMLOGGER.warning("Could not remove working directory %s", wdpath) -def initialize_nipype_wf(cfg, sub_data_dct, name=""): +def initialize_nipype_wf( + cfg: Configuration, + subject: str, + session: Optional[str] = None, + name: Optional[str] = None, +) -> pe.Workflow: """Initialize a new nipype workflow.""" - if name: - name = f"_{name}" + name = f"_{name}" if name else "" - workflow_name = ( - f'cpac{name}_{sub_data_dct["subject_id"]}_{sub_data_dct["unique_id"]}' - ) + identifier = subject + if session: + identifier = "_".join([identifier, session]) + workflow_name = f"cpac{name}_{identifier}" wf = pe.Workflow(name=workflow_name) wf.base_dir = 
cfg.pipeline_setup["working_directory"]["path"] wf.config["execution"] = { @@ -1227,7 +1232,9 @@ def build_workflow(subject_id, sub_dict, cfg, pipeline_name=None): from CPAC.utils.datasource import gather_extraction_maps # Workflow setup - wf = initialize_nipype_wf(cfg, sub_dict, name=pipeline_name) + wf = initialize_nipype_wf( + cfg, sub_dict["subject_id"], sub_dict.get("unique_id", None), name=pipeline_name + ) # Extract credentials path if it exists try: diff --git a/CPAC/pipeline/engine.py b/CPAC/pipeline/engine.py index 922b34c682..2e7b8987d6 100644 --- a/CPAC/pipeline/engine.py +++ b/CPAC/pipeline/engine.py @@ -1368,6 +1368,8 @@ def gather_pipes(self, wf, cfg, all=False, add_incl=None, add_excl=None): except OSError as os_error: WFLOGGER.warning(os_error) continue + except AttributeError: + breakpoint() write_json_imports = ["import os", "import json"] write_json = pe.Node( diff --git a/CPAC/pipeline/nipype_pipeline_engine/engine.py b/CPAC/pipeline/nipype_pipeline_engine/engine.py index 78eda7dca3..554e6a657c 100644 --- a/CPAC/pipeline/nipype_pipeline_engine/engine.py +++ b/CPAC/pipeline/nipype_pipeline_engine/engine.py @@ -51,7 +51,7 @@ for Nipype's documentation. """ # pylint: disable=line-too-long -from collections.abc import Mapping +from collections.abc import Mapping, Sequence from copy import deepcopy from inspect import Parameter, Signature, signature import os @@ -593,7 +593,7 @@ def _configure_exec_nodes(self, graph): def _connect_node_or_path( self, node: pe.Node, - strats_dct: Mapping[str, list[tuple[pe.Node, str] | str]], + strats_dct: Mapping[str, Sequence[tuple[pe.Node, str] | str]], key: str, index: int, ) -> None: diff --git a/CPAC/pipeline/test/test_engine.py b/CPAC/pipeline/test/test_engine.py index cf85f50dbe..2c079e83b7 100644 --- a/CPAC/pipeline/test/test_engine.py +++ b/CPAC/pipeline/test/test_engine.py @@ -27,11 +27,11 @@ def test_ingress_func_raw_data(pipe_config, bids_dir, test_dir): cfg.pipeline_setup["output_directory"]["path"] = os.path.join(test_dir, "out") cfg.pipeline_setup["working_directory"]["path"] = os.path.join(test_dir, "work") - wf = initialize_nipype_wf(cfg, sub_data_dct) - part_id = sub_data_dct["subject_id"] ses_id = sub_data_dct["unique_id"] + wf = initialize_nipype_wf(cfg, part_id, ses_id) + unique_id = f"{part_id}_{ses_id}" rpool = ResourcePool(name=unique_id, cfg=cfg) @@ -54,11 +54,11 @@ def test_ingress_anat_raw_data(pipe_config, bids_dir, test_dir): cfg.pipeline_setup["output_directory"]["path"] = os.path.join(test_dir, "out") cfg.pipeline_setup["working_directory"]["path"] = os.path.join(test_dir, "work") - wf = initialize_nipype_wf(cfg, sub_data_dct) - part_id = sub_data_dct["subject_id"] ses_id = sub_data_dct["unique_id"] + wf = initialize_nipype_wf(cfg, part_id, ses_id) + unique_id = f"{part_id}_{ses_id}" rpool = ResourcePool(name=unique_id, cfg=cfg) @@ -81,11 +81,11 @@ def test_ingress_pipeconfig_data(pipe_config, bids_dir, test_dir): cfg.pipeline_setup["working_directory"]["path"] = os.path.join(test_dir, "work") cfg.pipeline_setup["log_directory"]["path"] = os.path.join(test_dir, "logs") - wf = initialize_nipype_wf(cfg, sub_data_dct) - part_id = sub_data_dct["subject_id"] ses_id = sub_data_dct["unique_id"] + wf = initialize_nipype_wf(cfg, part_id, ses_id) + unique_id = f"{part_id}_{ses_id}" rpool = ResourcePool(name=unique_id, cfg=cfg) @@ -106,7 +106,9 @@ def test_build_anat_preproc_stack(pipe_config, bids_dir, test_dir): cfg.pipeline_setup["working_directory"]["path"] = os.path.join(test_dir, "work") 
cfg.pipeline_setup["log_directory"]["path"] = os.path.join(test_dir, "logs") - wf = initialize_nipype_wf(cfg, sub_data_dct) + wf = initialize_nipype_wf( + cfg, sub_data_dct["subject_id"], sub_data_dct["unique_id"] + ) wf, rpool = initiate_rpool(wf, cfg, sub_data_dct) @@ -127,7 +129,9 @@ def test_build_workflow(pipe_config, bids_dir, test_dir): cfg.pipeline_setup["working_directory"]["path"] = os.path.join(test_dir, "work") cfg.pipeline_setup["log_directory"]["path"] = os.path.join(test_dir, "logs") - wf = initialize_nipype_wf(cfg, sub_data_dct) + wf = initialize_nipype_wf( + cfg, sub_data_dct["subject_id"], sub_data_dct["unique_id"] + ) wf, rpool = initiate_rpool(wf, cfg, sub_data_dct) diff --git a/CPAC/registration/registration.py b/CPAC/registration/registration.py index c10a60f39d..b8456393d4 100644 --- a/CPAC/registration/registration.py +++ b/CPAC/registration/registration.py @@ -2783,15 +2783,14 @@ def register_symmetric_ANTs_anat_to_template(wf, cfg, strat_pool, pipe_num, opt= wf.connect(node, out, ants, "inputspec.lesion_mask") if "space-longitudinal" in brain: - for key in outputs.keys(): + for key in list(outputs.keys()): if "from-T1w" in key: new_key = key.replace("from-T1w", "from-longitudinal") outputs[new_key] = outputs[key] - del outputs[key] + if "to-T1w" in key: new_key = key.replace("to-T1w", "to-longitudinal") outputs[new_key] = outputs[key] - del outputs[key] return (wf, outputs) @@ -5414,6 +5413,7 @@ def warp_tissuemask_to_template(wf, cfg, strat_pool, pipe_num, xfm, template_spa "outputspec.output_image", ) for tissue in tissue_types + if apply_xfm[tissue] } return _warp_return(wf, apply_xfm, outputs) diff --git a/CPAC/seg_preproc/seg_preproc.py b/CPAC/seg_preproc/seg_preproc.py index 3051943314..ed99addc6e 100644 --- a/CPAC/seg_preproc/seg_preproc.py +++ b/CPAC/seg_preproc/seg_preproc.py @@ -577,8 +577,8 @@ def tissue_seg_fsl_fast(wf, cfg, strat_pool, pipe_num, opt=None): if use_priors: xfm = "from-template_to-T1w_mode-image_desc-linear_xfm" - if "space-longitudinal" in resource: - xfm = "from-template_to-longitudinal_mode-image_desc-linear_xfm" + # if "space-longitudinal" in resource: + # xfm = "from-template_to-longitudinal_mode-image_desc-linear_xfm" xfm_prov = strat_pool.get_cpac_provenance(xfm) reg_tool = check_prov_for_regtool(xfm_prov) else: diff --git a/CPAC/utils/interfaces/function/function.py b/CPAC/utils/interfaces/function/function.py index 34d01373d5..8295be4413 100644 --- a/CPAC/utils/interfaces/function/function.py +++ b/CPAC/utils/interfaces/function/function.py @@ -110,7 +110,9 @@ def get_function_name_from_source(function_source: str) -> str: def create_function_from_source( - function_source: str, imports: Optional[list[str]] = None, ns: Optional[dict] = None + function_source: str | bytes, + imports: Optional[list[str]] = None, + ns: Optional[dict] = None, ): """Return a function object from a function source. 
From af271653375d28216c4b5f322833efc2d76bfb98 Mon Sep 17 00:00:00 2001
From: Jon Cluce
Date: Tue, 19 Nov 2024 17:11:52 -0500
Subject: [PATCH 06/11] :construction: WIP :sparkles: Drop in `mri_robust_template`
---
 CPAC/longitudinal/robust_template.py | 112 ++++++++
 CPAC/longitudinal/wf/anat.py | 259 ++++++++++--------
 CPAC/longitudinal/wf/utils.py | 61 +++++
 CPAC/pipeline/engine.py | 54 ++--
 .../pipeline/nipype_pipeline_engine/engine.py | 6 +-
 CPAC/pipeline/nodeblock.py | 4 +-
 CPAC/pipeline/schema.py | 2 +-
 .../configs/pipeline_config_blank.yml | 4 +-
 .../configs/pipeline_config_default.yml | 4 +-
 9 files changed, 364 insertions(+), 142 deletions(-)
 create mode 100644 CPAC/longitudinal/wf/utils.py
diff --git a/CPAC/longitudinal/robust_template.py b/CPAC/longitudinal/robust_template.py
index dccbbb12f5..b88e6f6ff9 100644
--- a/CPAC/longitudinal/robust_template.py
+++ b/CPAC/longitudinal/robust_template.py
@@ -16,3 +16,115 @@
 # You should have received a copy of the GNU Lesser General Public
 # License along with C-PAC. If not, see .
 """Create longitudinal template using ``mri_robust_template``."""
+
+import os
+from typing import cast, Literal
+
+from nipype.interfaces.base import (
+ File,
+ InputMultiPath,
+ isdefined,
+ OutputMultiPath,
+ traits,
+)
+from nipype.interfaces.freesurfer import longitudinal
+
+from CPAC.pipeline import nipype_pipeline_engine as pe
+from CPAC.utils.configuration import Configuration
+
+
+class RobustTemplateInputSpec(longitudinal.RobustTemplateInputSpec): # noqa: D101
+ affine = traits.Bool(default_value=False, desc="compute 12 DOF registration")
+ mapmov = traits.Either(
+ InputMultiPath(File(exists=False)),
+ traits.Bool,
+ argstr="--mapmov %s",
+ desc="output images: map and resample each input to template",
+ )
+ maxit = traits.Int(
+ argstr="--maxit %d",
+ mandatory=False,
+ desc="iterate max # times (if #tp>2 default 6, else 5 for 2tp reg.)",
+ )
+
+
+class RobustTemplateOutputSpec(longitudinal.RobustTemplateOutputSpec): # noqa: D101
+ mapmov_outputs = OutputMultiPath(
+ File(exists=True),
+ desc="each input mapped and resampled to longitudinal template",
+ )
+
+
+class RobustTemplate(longitudinal.RobustTemplate): # noqa: D101
+ # STATEMENT OF CHANGES:
+ # This class is derived from sources licensed under the Apache-2.0 terms,
+ # and this class has been changed.
+
+ # CHANGES:
+ # * Added handling for `affine`, `mapmov`, and `maxit`
+
+ # ORIGINAL WORK'S ATTRIBUTION NOTICE:
+ # Copyright (c) 2009-2016, Nipype developers
+
+ # Licensed under the Apache License, Version 2.0 (the "License");
+ # you may not use this file except in compliance with the License.
+ # You may obtain a copy of the License at
+
+ # http://www.apache.org/licenses/LICENSE-2.0
+
+ # Unless required by applicable law or agreed to in writing, software
+ # distributed under the License is distributed on an "AS IS" BASIS,
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ # See the License for the specific language governing permissions and
+ # limitations under the License.
+
+ # Prior to release 0.12, Nipype was licensed under a BSD license. 
+ + # Modifications copyright (C) 2024 C-PAC Developers + input_spec = RobustTemplateInputSpec + output_spec = RobustTemplateOutputSpec + + def _format_arg(self, name, spec, value): + if name == "average_metric": + # return enumeration value + return spec.argstr % {"mean": 0, "median": 1}[value] + if name in ("mapmov", "transform_outputs", "scaled_intensity_outputs"): + value = self._list_outputs()[name] + return super()._format_arg(name, spec, value) + + def _list_outputs(self): + """:py:meth:`~nipype.interfaces.freesurfer.RobustTemplate._list_outputs` + `mapmov`.""" + outputs = self.output_spec().get() + outputs["out_file"] = os.path.abspath(self.inputs.out_file) + n_files = len(self.inputs.in_files) + fmt = "{}{:02d}.{}" if n_files > 9 else "{}{:d}.{}" # noqa: PLR2004 + for key, prefix, ext in [ + ("transform_outputs", "tp", "lta"), + ("scaled_intensity_outputs", "is", "txt"), + ("mapmov", "space-longitudinal", "nii.gz"), + ]: + if isdefined(getattr(self.inputs, key)): + fnames = getattr(self.inputs, key) + if fnames is True: + fnames = [fmt.format(prefix, i + 1, ext) for i in range(n_files)] + outputs[key] = [os.path.abspath(x) for x in fnames] + return outputs + + +def mri_robust_template(name: str, cfg: Configuration) -> pe.Node: + """Return a Node to run `mri_robust_template` with common options.""" + node = pe.Node(RobustTemplate(), name=name) + node.set_input("mapmov", True) + node.set_input("transform_outputs", True) + node.set_input( + "average_metric", cfg["longitudinal_template_generation", "average_method"] + ) + node.set_input("affine", cfg["longitudinal_template_generation", "dof"] == 12) # noqa: PLR2004 + max_iter = cast( + int | Literal["default"], cfg["longitudinal_template_generation", "max_iter"] + ) + if isinstance(max_iter, int): + node.set_input("maxit", max_iter) + node.set_input("auto_detect_sensitivity", True) + + return node diff --git a/CPAC/longitudinal/wf/anat.py b/CPAC/longitudinal/wf/anat.py index 5b6bc35a5d..34bfe39815 100644 --- a/CPAC/longitudinal/wf/anat.py +++ b/CPAC/longitudinal/wf/anat.py @@ -25,6 +25,8 @@ from nipype.interfaces.utility import Merge from CPAC.longitudinal.preproc import subject_specific_template +from CPAC.longitudinal.robust_template import mri_robust_template +from CPAC.longitudinal.wf.utils import select_session_node from CPAC.pipeline import nipype_pipeline_engine as pe from CPAC.pipeline.cpac_pipeline import ( build_anat_preproc_stack, @@ -37,7 +39,6 @@ from CPAC.pipeline.nodeblock import nodeblock, NODEBLOCK_RETURN from CPAC.registration.registration import apply_transform from CPAC.utils.configuration import Configuration -from CPAC.utils.interfaces.function import Function from CPAC.utils.utils import check_prov_for_regtool @@ -45,8 +46,6 @@ name="mask_T1w_longitudinal_template", config=["longitudinal_template_generation"], switch=["run"], - option_key="using", - option_val="C-PAC legacy", inputs=["desc-brain_T1w"], outputs=["space-T1w_desc-brain_mask"], ) @@ -82,27 +81,10 @@ def pick_map( return None -def select_session( - session: str, output_brains: list[str], warps: list[str] -) -> tuple[Optional[str], Optional[str]]: - """Select output brain image and warp for given session.""" - brain_path = None - warp_path = None - for brain_path in output_brains: - if f"{session}_" in brain_path: - break - for warp_path in warps: - if f"{session}_" in warp_path: - break - return brain_path, warp_path - - @nodeblock( name="mask_longitudinal_T1w_brain", config=["longitudinal_template_generation"], switch=["run"], - option_key="using", 
- option_val="C-PAC legacy", inputs=["space-longitudinal_desc-brain_T1w"], outputs=["space-longitudinal_desc-brain_mask"], ) @@ -187,8 +169,6 @@ def warp_longitudinal_T1w_to_template( name="warp_longitudinal_seg_to_T1w", config=["longitudinal_template_generation"], switch=["run"], - option_key="using", - option_val="C-PAC legacy", inputs=[ ( "space-longitudinal_desc-brain_T1w", @@ -250,6 +230,7 @@ def warp_longitudinal_seg_to_T1w( "in_file", ) xfm = (invt, "out_file") + num_cpus = cfg.pipeline_setup["system_config"]["max_cores_per_participant"] num_ants_cores = cfg.pipeline_setup["system_config"]["num_ants_threads"] @@ -331,9 +312,6 @@ def anat_longitudinal_wf( """List of lists for every strategy""" session_wfs = {} - cpac_dirs = [] - out_dir: str = config.pipeline_setup["output_directory"]["path"] - orig_pipe_name: str = config.pipeline_setup["pipeline_name"] strats_dct: dict[str, list[tuple[pe.Node, str] | str]] = { @@ -394,95 +372,112 @@ def anat_longitudinal_wf( config.pipeline_setup["pipeline_name"] = f"longitudinal_{orig_pipe_name}" - template_node_name = "longitudinal_anat_template_brain" - - # This node will generate the longitudinal template (the functions are - # in longitudinal_preproc) - # Later other algorithms could be added to calculate it, like the - # multivariate template from ANTS - # It would just require to change it here. - template_node = subject_specific_template(workflow_name=template_node_name) - - template_node.inputs.set( - avg_method=config.longitudinal_template_generation["average_method"], - dof=config.longitudinal_template_generation["dof"], - interp=config.longitudinal_template_generation["legacy-specific"]["interp"], - cost=config.longitudinal_template_generation["legacy-specific"]["cost"], - convergence_threshold=config.longitudinal_template_generation[ - "legacy-specific" - ]["convergence_threshold"], - thread_pool=config.longitudinal_template_generation["legacy-specific"][ - "thread_pool" - ], - unique_id_list=list(session_wfs.keys()), - ) - num_sessions = len(strats_dct["desc-brain_T1w"]) merge_brains = pe.Node(Merge(num_sessions), name="merge_brains") merge_skulls = pe.Node(Merge(num_sessions), name="merge_skulls") - - for i in list(range(0, num_sessions)): - wf._connect_node_or_path(merge_brains, strats_dct, "desc-brain_T1w", i) - wf._connect_node_or_path(merge_skulls, strats_dct, "desc-head_T1w", i) - wf.connect(merge_brains, "out", template_node, "input_brain_list") - wf.connect(merge_skulls, "out", template_node, "input_skull_list") + wf.add_nodes([merge_brains, merge_skulls]) long_id = f"longitudinal_{subject_id}_strat-desc-brain_T1w" wf, rpool = initiate_rpool(wf, config, part_id=long_id) - rpool.set_data( - "space-longitudinal_desc-brain_T1w", - template_node, - "brain_template", - {}, - "", - template_node_name, - ) - - rpool.set_data( - "space-longitudinal_desc-brain_T1w-template", - template_node, - "brain_template", - {}, - "", - template_node_name, - ) - - rpool.set_data( - "space-longitudinal_desc-reorient_T1w", - template_node, - "skull_template", - {}, - "", - template_node_name, - ) - - rpool.set_data( - "space-longitudinal_desc-reorient_T1w-template", - template_node, - "skull_template", - {}, - "", - template_node_name, - ) - pipeline_blocks = [mask_longitudinal_T1w_brain] - # breakpoint() + pipeline_blocks = build_T1w_registration_stack( rpool, config, pipeline_blocks, space="longitudinal" ) pipeline_blocks = build_segmentation_stack(rpool, config, pipeline_blocks) - wf = connect_pipeline(wf, config, rpool, pipeline_blocks) + match 
config["longitudinal_template_generation", "using"]: + case "C-PAC legacy": + brain_output = "brain_template" + head_output = "skull_template" - excl = [ - "space-longitudinal_desc-brain_T1w", - "space-longitudinal_desc-reorient_T1w", - "space-longitudinal_desc-brain_mask", - ] - rpool.gather_pipes(wf, config, add_excl=excl) + # This node will generate the longitudinal template (the functions are + # in longitudinal_preproc) + # Later other algorithms could be added to calculate it, like the + # multivariate template from ANTS + # It would just require to change it here. + + # multiple variable names here for compatibility with other options later in this function + brain_template_node = wholehead_template_node = template_node = ( + subject_specific_template(workflow_name="longitudinal_anat_template") + ) + + template_node.inputs.set( + avg_method=config.longitudinal_template_generation["average_method"], + dof=config.longitudinal_template_generation["dof"], + interp=config.longitudinal_template_generation["legacy-specific"][ + "interp" + ], + cost=config.longitudinal_template_generation["legacy-specific"]["cost"], + convergence_threshold=config.longitudinal_template_generation[ + "legacy-specific" + ]["convergence_threshold"], + thread_pool=config.longitudinal_template_generation["legacy-specific"][ + "thread_pool" + ], + unique_id_list=list(session_wfs.keys()), + ) + + wf.connect(merge_brains, "out", brain_template_node, "input_brain_list") + wf.connect(merge_skulls, "out", wholehead_template_node, "input_skull_list") + + wf = connect_pipeline(wf, config, rpool, pipeline_blocks) + + excl = [ + "space-longitudinal_desc-brain_T1w", + "space-longitudinal_desc-reorient_T1w", + "space-longitudinal_desc-brain_mask", + ] + rpool.gather_pipes(wf, config, add_excl=excl) + + case "mri_robust_template": + brain_output = head_output = "out_file" + brain_template_node = mri_robust_template( + f"mri_robust_template_brain_{subject_id}", config + ) + wholehead_template_node = mri_robust_template( + f"mri_robust_template_head_{subject_id}", config + ) + wf.connect(merge_brains, "out", brain_template_node, "in_files") + wf.connect(merge_brains, "out", wholehead_template_node, "in_files") + + case _: + msg = ": ".join( + [ + "Invalid 'using' value for longitudinal template generation", + str(config["longitudinal_template_generation", "using"]), + ] + ) + raise ValueError(msg) + + for suffix in ["", "-template"]: + rpool.set_data( + f"space-longitudinal_desc-brain_T1w{suffix}", + brain_template_node, + brain_output, + {}, + "", + brain_template_node.name, + ) + + for desc in ["head", "reorient"]: + rpool.set_data( + f"space-longitudinal_desc-{desc}_T1w{suffix}", + wholehead_template_node, + head_output, + {}, + "", + wholehead_template_node.name, + ) + + for i in list(range(0, num_sessions)): + wf._connect_node_or_path_for_merge( + merge_brains, strats_dct, "desc-brain_T1w", i + ) + wf._connect_node_or_path_for_merge(merge_skulls, strats_dct, "desc-head_T1w", i) if not dry_run: wf.run() @@ -526,19 +521,54 @@ def anat_longitudinal_wf( creds_path=input_creds_path, ) - select_node_name = f"FSL_select_{unique_id}" - select_sess = pe.Node( - Function( - input_names=["session", "output_brains", "warps"], - output_names=["brain_path", "warp_path"], - function=select_session, - ), - name=select_node_name, - ) - select_sess.inputs.session = unique_id - - wf.connect(template_node, "output_brain_list", select_sess, "output_brains") - wf.connect(template_node, "warp_list", select_sess, "warps") + select_sess = 
select_session_node(unique_id)
+
+        match config["longitudinal_template_generation", "using"]:
+            case "C-PAC legacy":
+                wf.connect(
+                    brain_template_node,
+                    "output_brain_list",
+                    select_sess,
+                    "output_brains",
+                )
+                wf.connect(brain_template_node, "warp_list", select_sess, "warps")
+
+            case "mri_robust_template":
+                wf.connect(
+                    brain_template_node, "mapmov_outputs", select_sess, "output_brains"
+                )
+                wf.connect(
+                    brain_template_node, "transform_outputs", select_sess, "warps"
+                )
+                head_select_sess = select_session_node(unique_id, "wholehead")
+                wf.connect(
+                    wholehead_template_node,
+                    "mapmov_outputs",
+                    head_select_sess,
+                    "output_brains",
+                )
+                wf.connect(
+                    wholehead_template_node,
+                    "transform_outputs",
+                    head_select_sess,
+                    "warps",
+                )
+                rpool.set_data(
+                    "space-longitudinal_desc-head_T1w",
+                    head_select_sess,
+                    "brain_path",
+                    {},
+                    "",
+                    head_select_sess.name,
+                )
+                rpool.set_data(
+                    "from-T1w_to-longitudinal_mode-image_desc-linear_xfm",
+                    head_select_sess,
+                    "warp_path",
+                    {},
+                    "",
+                    head_select_sess.name,
+                )

         rpool.set_data(
             "space-longitudinal_desc-brain_T1w",
@@ -546,16 +576,15 @@ def anat_longitudinal_wf(
             "brain_path",
             {},
             "",
-            select_node_name,
+            select_sess.name,
         )
-
         rpool.set_data(
             "from-T1w_to-longitudinal_mode-image_desc-linear_xfm",
             select_sess,
             "warp_path",
             {},
             "",
-            select_node_name,
+            select_sess.name,
         )

         config.pipeline_setup["pipeline_name"] = orig_pipe_name
@@ -588,7 +617,7 @@ def anat_longitudinal_wf(

         wf = initialize_nipype_wf(config, subject_id, unique_id)

-        wf, rpool = initiate_rpool(wf, config, session)
+        wf, rpool = initiate_rpool(wf, config, session, rpool=rpool)

         pipeline_blocks = [
             warp_longitudinal_T1w_to_template,
diff --git a/CPAC/longitudinal/wf/utils.py b/CPAC/longitudinal/wf/utils.py
new file mode 100644
index 0000000000..094ba09bb9
--- /dev/null
+++ b/CPAC/longitudinal/wf/utils.py
@@ -0,0 +1,61 @@
+# -*- coding: utf-8 -*-
+# Copyright (C) 2020-2024 C-PAC Developers

+# This file is part of C-PAC.

+# C-PAC is free software: you can redistribute it and/or modify it under
+# the terms of the GNU Lesser General Public License as published by the
+# Free Software Foundation, either version 3 of the License, or (at your
+# option) any later version.

+# C-PAC is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
+# License for more details.

+# You should have received a copy of the GNU Lesser General Public
+# License along with C-PAC. If not, see <https://www.gnu.org/licenses/>.
+"""Utilities for longitudinal workflows."""

+from typing import Optional

+from CPAC.pipeline import nipype_pipeline_engine as pe
+from CPAC.utils.interfaces.function import Function


+def select_session(
+    session: str, output_brains: list[str], warps: list[str]
+) -> tuple[Optional[str], Optional[str]]:
+    """Select output brain image and warp for given session."""
+    brain_path = None
+    warp_path = None
+    for brain_path in output_brains:
+        if f"{session}_" in brain_path:
+            break
+    for warp_path in warps:
+        if f"{session}_" in warp_path:
+            break
+    return brain_path, warp_path


+def select_session_node(unique_id: str, suffix: str = "") -> pe.Node:
+    """Create a Node to select a single session's output image and transform.
+
+    Note
+    ----
+    FSL is the only currently implemented registration tool for longitudinal template
+    generation, so it's hardcoded into the name of this node for
+    feeding :py:func:`~CPAC.utils.utils.check_prov_for_regtool`. 
+ """ + if suffix: + suffix = f"_{suffix.lstrip('_')}" + select_sess = pe.Node( + Function( + input_names=["session", "output_brains", "warps"], + output_names=["brain_path", "warp_path"], + function=select_session, + ), + name=f"longitudinal_select_FSL_{unique_id}{suffix}", + ) + select_sess.inputs.session = unique_id + return select_sess diff --git a/CPAC/pipeline/engine.py b/CPAC/pipeline/engine.py index 2e7b8987d6..8b9b8315af 100644 --- a/CPAC/pipeline/engine.py +++ b/CPAC/pipeline/engine.py @@ -71,7 +71,9 @@ class ResourcePool: - def __init__(self, rpool=None, name=None, cfg=None, pipe_list=None): + def __init__( + self, rpool=None, name: Optional[str] = None, cfg=None, pipe_list=None + ): if not rpool: self.rpool = {} else: @@ -82,7 +84,7 @@ def __init__(self, rpool=None, name=None, cfg=None, pipe_list=None): else: self.pipe_list = pipe_list - self.name = name + self.name = name or "" self.info = {} if cfg: @@ -146,6 +148,29 @@ def __str__(self) -> str: return f"ResourcePool({self.name}): {list(self.rpool)}" return f"ResourcePool: {list(self.rpool)}" + def _set_id_parts(self) -> None: + """Set part_id and ses_id.""" + unique_id = self.name + setattr(self, "_part_id", unique_id.split("_")[0]) + ses_id = unique_id.split("_")[1] + if "ses-" not in ses_id: + ses_id = f"ses-{ses_id}" + setattr(self, "_ses_id", ses_id) + + @property + def part_id(self) -> str: + """Access participant ID.""" + if not hasattr(self, "_part_id"): + self._set_id_parts() + return getattr(self, "_part_id") + + @property + def ses_id(self) -> str: + """Access session ID.""" + if not hasattr(self, "_part_id"): + self._set_id_parts() + return getattr(self, "_ses_id") + def append_name(self, name): self.name.append(name) @@ -194,7 +219,9 @@ def back_propogate_template_name( pass return - def get_name(self): + def get_name(self) -> str: + if not hasattr(self, "_part_id"): + self._set_id_parts() return self.name def check_rpool(self, resource): @@ -1127,22 +1154,17 @@ def gather_pipes(self, wf, cfg, all=False, add_incl=None, add_excl=None): # TODO: other stuff like acq- etc. for pipe_idx in self.rpool[resource]: - unique_id = self.get_name() - part_id = unique_id.split("_")[0] - ses_id = unique_id.split("_")[1] - - if "ses-" not in ses_id: - ses_id = f"ses-{ses_id}" - out_dir = cfg.pipeline_setup["output_directory"]["path"] pipe_name = cfg.pipeline_setup["pipeline_name"] - container = os.path.join(f"pipeline_{pipe_name}", part_id, ses_id) - filename = f"{unique_id}_{res_in_filename(self.cfg, resource)}" + container = os.path.join( + f"pipeline_{pipe_name}", self.part_id, self.ses_id + ) + filename = f"{self.get_name()}_{res_in_filename(self.cfg, resource)}" out_path = os.path.join(out_dir, container, subdir, filename) out_dct = { - "unique_id": unique_id, + "unique_id": self.get_name(), "out_dir": out_dir, "container": container, "subdir": subdir, @@ -1368,8 +1390,6 @@ def gather_pipes(self, wf, cfg, all=False, add_incl=None, add_excl=None): except OSError as os_error: WFLOGGER.warning(os_error) continue - except AttributeError: - breakpoint() write_json_imports = ["import os", "import json"] write_json = pe.Node( @@ -2636,7 +2656,7 @@ def initiate_rpool( part_id=None, *, rpool: Optional[ResourcePool] = None, -): +) -> tuple[pe.Workflow, ResourcePool]: """ Initialize a new ResourcePool. 
@@ -2708,7 +2728,7 @@ def initiate_rpool( # output files with 4 different scans - return (wf, rpool) + return wf, rpool def run_node_blocks(blocks, data_paths, cfg=None): diff --git a/CPAC/pipeline/nipype_pipeline_engine/engine.py b/CPAC/pipeline/nipype_pipeline_engine/engine.py index 554e6a657c..2627b95d64 100644 --- a/CPAC/pipeline/nipype_pipeline_engine/engine.py +++ b/CPAC/pipeline/nipype_pipeline_engine/engine.py @@ -590,17 +590,17 @@ def _configure_exec_nodes(self, graph): except (FileNotFoundError, KeyError, TypeError): self._handle_just_in_time_exception(node) - def _connect_node_or_path( + def _connect_node_or_path_for_merge( self, node: pe.Node, strats_dct: Mapping[str, Sequence[tuple[pe.Node, str] | str]], key: str, index: int, ) -> None: - """Set input appropriately for either a Node or a path string.""" + """Set input to either a Node or a path string for cross-graph Merge Nodes.""" _input: str = f"in{index + 1}" if isinstance(strats_dct[key][index], str): - setattr(node.inputs, _input, strats_dct[key][index]) + node.set_input(_input, strats_dct[key][index]) else: self.connect(*strats_dct[key][index], node, _input) diff --git a/CPAC/pipeline/nodeblock.py b/CPAC/pipeline/nodeblock.py index 305119b55d..d8aad507c1 100644 --- a/CPAC/pipeline/nodeblock.py +++ b/CPAC/pipeline/nodeblock.py @@ -60,9 +60,9 @@ def __init__( """ self.option_val: Optional[str | list[str]] = option_val """Indicates values for which this NodeBlock should be active.""" - self.inputs: Optional[list[str | list | tuple]] = inputs + self.inputs: list[str | list | tuple] = inputs or [] """ResourcePool keys indicating resources needed for the NodeBlock's functionality.""" - self.outputs: Optional[list[str] | dict[str, Any]] = outputs + self.outputs: list[str] | dict[str, Any] = outputs or [] """ ResourcePool keys indicating resources generated or updated by the NodeBlock, optionally including metadata for the outputs' respective sidecars. 
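The `_connect_node_or_path_for_merge` helper renamed above dispatches on the type of each strategy entry: a literal path string (produced by a graph that has already run) becomes a static input, while a live `(node, output)` pair becomes a graph edge. A minimal sketch of that dispatch, written against plain nipype rather than C-PAC's wrapped engine, with illustrative node and path names:

# Each entry feeds a numbered Merge input, whether it is a path or a node output.
from nipype.interfaces.utility import IdentityInterface, Merge
import nipype.pipeline.engine as pe

wf = pe.Workflow(name="sketch")
live = pe.Node(IdentityInterface(fields=["out_file"]), name="live")
entries = ["/data/ses-1_desc-brain_T1w.nii.gz", (live, "out_file")]
merge = pe.Node(Merge(len(entries)), name="merge_brains")
for i, entry in enumerate(entries):
    if isinstance(entry, str):
        merge.set_input(f"in{i + 1}", entry)  # static input: file already on disk
    else:
        wf.connect(*entry, merge, f"in{i + 1}")  # graph edge: resolved when wf runs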
diff --git a/CPAC/pipeline/schema.py b/CPAC/pipeline/schema.py index f39722dafb..468c46f1a3 100644 --- a/CPAC/pipeline/schema.py +++ b/CPAC/pipeline/schema.py @@ -1455,7 +1455,7 @@ def schema(config_dict): if lgt["using"] == "mri_robust_template": check_unimplemented( lgt, - [("average_method", "std"), ("dof", 9), ("max_iter", -1)], + [("average_method", "std"), ("dof", 9), ("dof", 7), ("max_iter", -1)], "longitudinal `mri_robust_template`", ) if lgt["using"] == "C-PAC legacy": diff --git a/CPAC/resources/configs/pipeline_config_blank.yml b/CPAC/resources/configs/pipeline_config_blank.yml index 349383d14b..dde0dc6de7 100644 --- a/CPAC/resources/configs/pipeline_config_blank.yml +++ b/CPAC/resources/configs/pipeline_config_blank.yml @@ -1524,8 +1524,8 @@ longitudinal_template_generation: average_method: median # Degree of freedom for FLIRT in the template creation - # Options: 12 (affine), 7 (global rescale), or 6 (rigid body) - # Additional option if using "C-PAC legacy": 9 (traditional) + # Options: 12 (affine) or 6 (rigid body) + # Additional options if using "C-PAC legacy": 9 (traditional), 7 (global rescale) dof: 12 # Maximum iterations diff --git a/CPAC/resources/configs/pipeline_config_default.yml b/CPAC/resources/configs/pipeline_config_default.yml index 16ae7ea63f..f629792b47 100644 --- a/CPAC/resources/configs/pipeline_config_default.yml +++ b/CPAC/resources/configs/pipeline_config_default.yml @@ -260,8 +260,8 @@ longitudinal_template_generation: average_method: median # Degree of freedom for FLIRT in the template creation - # Options: 12 (affine), 7 (global rescale), or 6 (rigid body) - # Additional option if using "C-PAC legacy": 9 (traditional) + # Options: 12 (affine) or 6 (rigid body) + # Additional options if using "C-PAC legacy": 9 (traditional), 7 (global rescale) dof: 12 # Maximum iterations From a5d2c60989532f5b2e441bdb05509d6fa81c38c7 Mon Sep 17 00:00:00 2001 From: Jon Cluce Date: Tue, 19 Nov 2024 21:46:41 -0500 Subject: [PATCH 07/11] :recycle: SSOT `check_creds_path` --- CPAC/anat_preproc/anat_preproc.py | 4 +- CPAC/longitudinal/robust_template.py | 2 +- CPAC/longitudinal/wf/anat.py | 111 +++++++-------------------- CPAC/longitudinal/wf/func.py | 18 +---- CPAC/longitudinal/wf/utils.py | 15 ++++ 5 files changed, 49 insertions(+), 101 deletions(-) diff --git a/CPAC/anat_preproc/anat_preproc.py b/CPAC/anat_preproc/anat_preproc.py index a561f8e077..5d1c4aea22 100644 --- a/CPAC/anat_preproc/anat_preproc.py +++ b/CPAC/anat_preproc/anat_preproc.py @@ -34,7 +34,7 @@ wb_command, ) from CPAC.pipeline import nipype_pipeline_engine as pe -from CPAC.pipeline.nodeblock import nodeblock +from CPAC.pipeline.nodeblock import nodeblock, NODEBLOCK_RETURN from CPAC.utils.interfaces import Function from CPAC.utils.interfaces.fsl import Merge as fslMerge @@ -1447,7 +1447,7 @@ def mask_T2(wf_name="mask_T2"): inputs=["T1w"], outputs=["desc-preproc_T1w", "desc-reorient_T1w", "desc-head_T1w"], ) -def anatomical_init(wf, cfg, strat_pool, pipe_num, opt=None): +def anatomical_init(wf, cfg, strat_pool, pipe_num, opt=None) -> NODEBLOCK_RETURN: anat_deoblique = pe.Node(interface=afni.Refit(), name=f"anat_deoblique_{pipe_num}") anat_deoblique.inputs.deoblique = True diff --git a/CPAC/longitudinal/robust_template.py b/CPAC/longitudinal/robust_template.py index b88e6f6ff9..e1c5300106 100644 --- a/CPAC/longitudinal/robust_template.py +++ b/CPAC/longitudinal/robust_template.py @@ -49,7 +49,7 @@ class RobustTemplateInputSpec(longitudinal.RobustTemplateInputSpec): # noqa: D1 class 
RobustTemplateOutputSpec(longitudinal.RobustTemplateOutputSpec): # noqa: D101 - mapmov_outputs = OutputMultiPath( + mapmov = OutputMultiPath( File(exists=True), desc="each input mapped and resampled to longitudinal template", ) diff --git a/CPAC/longitudinal/wf/anat.py b/CPAC/longitudinal/wf/anat.py index 34bfe39815..8bbfb299d4 100644 --- a/CPAC/longitudinal/wf/anat.py +++ b/CPAC/longitudinal/wf/anat.py @@ -17,7 +17,6 @@ # License along with C-PAC. If not, see . """Longitudinal workflows for anatomical data.""" -import os from typing import cast, Optional from nipype import config as nipype_config @@ -26,7 +25,7 @@ from CPAC.longitudinal.preproc import subject_specific_template from CPAC.longitudinal.robust_template import mri_robust_template -from CPAC.longitudinal.wf.utils import select_session_node +from CPAC.longitudinal.wf.utils import check_creds_path, select_session_node from CPAC.pipeline import nipype_pipeline_engine as pe from CPAC.pipeline.cpac_pipeline import ( build_anat_preproc_stack, @@ -322,23 +321,7 @@ def anat_longitudinal_wf( # Loop over the sessions to create the input for the longitudinal algorithm unique_id: str = session["unique_id"] session_id_list.append(unique_id) - - try: - creds_path = session["creds_path"] - if creds_path and "none" not in creds_path.lower(): - if os.path.exists(creds_path): - input_creds_path = os.path.abspath(creds_path) - else: - err_msg = ( - 'Credentials path: "%s" for subject "%s" ' - 'session "%s" was not found. Check this path ' - "and try again." % (creds_path, subject_id, unique_id) - ) - raise Exception(err_msg) - else: - input_creds_path = None - except KeyError: - input_creds_path = None + input_creds_path = check_creds_path(session.get("creds_path"), subject_id) workflow: pe.Workflow = initialize_nipype_wf( config, @@ -376,19 +359,16 @@ def anat_longitudinal_wf( merge_brains = pe.Node(Merge(num_sessions), name="merge_brains") merge_skulls = pe.Node(Merge(num_sessions), name="merge_skulls") wf.add_nodes([merge_brains, merge_skulls]) + for i in list(range(0, num_sessions)): + wf._connect_node_or_path_for_merge( + merge_brains, strats_dct, "desc-brain_T1w", i + ) + wf._connect_node_or_path_for_merge(merge_skulls, strats_dct, "desc-head_T1w", i) long_id = f"longitudinal_{subject_id}_strat-desc-brain_T1w" wf, rpool = initiate_rpool(wf, config, part_id=long_id) - pipeline_blocks = [mask_longitudinal_T1w_brain] - - pipeline_blocks = build_T1w_registration_stack( - rpool, config, pipeline_blocks, space="longitudinal" - ) - - pipeline_blocks = build_segmentation_stack(rpool, config, pipeline_blocks) - match config["longitudinal_template_generation", "using"]: case "C-PAC legacy": brain_output = "brain_template" @@ -424,15 +404,6 @@ def anat_longitudinal_wf( wf.connect(merge_brains, "out", brain_template_node, "input_brain_list") wf.connect(merge_skulls, "out", wholehead_template_node, "input_skull_list") - wf = connect_pipeline(wf, config, rpool, pipeline_blocks) - - excl = [ - "space-longitudinal_desc-brain_T1w", - "space-longitudinal_desc-reorient_T1w", - "space-longitudinal_desc-brain_mask", - ] - rpool.gather_pipes(wf, config, add_excl=excl) - case "mri_robust_template": brain_output = head_output = "out_file" brain_template_node = mri_robust_template( @@ -473,11 +444,22 @@ def anat_longitudinal_wf( wholehead_template_node.name, ) - for i in list(range(0, num_sessions)): - wf._connect_node_or_path_for_merge( - merge_brains, strats_dct, "desc-brain_T1w", i - ) - wf._connect_node_or_path_for_merge(merge_skulls, strats_dct, 
"desc-head_T1w", i) + pipeline_blocks = [mask_longitudinal_T1w_brain] + pipeline_blocks = build_T1w_registration_stack( + rpool, config, pipeline_blocks, space="longitudinal" + ) + pipeline_blocks = build_segmentation_stack(rpool, config, pipeline_blocks) + + rpool.gather_pipes( + wf, + config, + add_excl=[ + "space-longitudinal_desc-brain_T1w", + "space-longitudinal_desc-reorient_T1w", + "space-longitudinal_desc-brain_mask", + ], + ) + wf = connect_pipeline(wf, config, rpool, pipeline_blocks) if not dry_run: wf.run() @@ -486,27 +468,11 @@ def anat_longitudinal_wf( config.pipeline_setup["pipeline_name"] = orig_pipe_name for session in sub_list: unique_id = session["unique_id"] - - try: - creds_path = session["creds_path"] - if creds_path and "none" not in creds_path.lower(): - if os.path.exists(creds_path): - input_creds_path = os.path.abspath(creds_path) - else: - err_msg = ( - 'Credentials path: "%s" for subject "%s" ' - 'session "%s" was not found. Check this path ' - "and try again." % (creds_path, subject_id, unique_id) - ) - raise Exception(err_msg) - else: - input_creds_path = None - except KeyError: - input_creds_path = None + input_creds_path = check_creds_path(session.get("creds_path"), subject_id) wf = initialize_nipype_wf(config, subject_id, unique_id) - wf, rpool = initiate_rpool(wf, config, session, rpool=rpool) + wf, rpool = initiate_rpool(wf, config, session) config.pipeline_setup["pipeline_name"] = f"longitudinal_{orig_pipe_name}" if "derivatives_dir" in session: @@ -534,16 +500,14 @@ def anat_longitudinal_wf( wf.connect(brain_template_node, "warp_list", select_sess, "warps") case "mri_robust_template": - wf.connect( - brain_template_node, "mapmov_outputs", select_sess, "output_brains" - ) + wf.connect(brain_template_node, "mapmov", select_sess, "output_brains") wf.connect( brain_template_node, "transform_outputs", select_sess, "warps" ) head_select_sess = select_session_node(unique_id, "wholehead") wf.connect( wholehead_template_node, - "mapmov_outputs", + "mapmov", head_select_sess, "output_brains", ) @@ -589,7 +553,6 @@ def anat_longitudinal_wf( config.pipeline_setup["pipeline_name"] = orig_pipe_name excl = ["space-template_desc-brain_T1w", "space-T1w_desc-brain_mask"] - rpool.gather_pipes(wf, config, add_excl=excl) if not dry_run: wf.run() @@ -597,27 +560,11 @@ def anat_longitudinal_wf( # begin single-session stuff again for session in sub_list: unique_id = session["unique_id"] - - try: - creds_path = session["creds_path"] - if creds_path and "none" not in creds_path.lower(): - if os.path.exists(creds_path): - input_creds_path = os.path.abspath(creds_path) - else: - err_msg = ( - 'Credentials path: "%s" for subject "%s" ' - 'session "%s" was not found. Check this path ' - "and try again." 
% (creds_path, subject_id, unique_id) - ) - raise Exception(err_msg) - else: - input_creds_path = None - except KeyError: - input_creds_path = None + input_creds_path = check_creds_path(session.get("creds_path"), subject_id) wf = initialize_nipype_wf(config, subject_id, unique_id) - wf, rpool = initiate_rpool(wf, config, session, rpool=rpool) + wf, rpool = initiate_rpool(wf, config, session) pipeline_blocks = [ warp_longitudinal_T1w_to_template, diff --git a/CPAC/longitudinal/wf/func.py b/CPAC/longitudinal/wf/func.py index 00847073f1..3a7ca0669e 100644 --- a/CPAC/longitudinal/wf/func.py +++ b/CPAC/longitudinal/wf/func.py @@ -22,6 +22,7 @@ import nipype.interfaces.io as nio from CPAC.longitudinal.preproc import subject_specific_template +from CPAC.longitudinal.wf.utils import check_creds_path from CPAC.pipeline import nipype_pipeline_engine as pe from CPAC.registration import ( create_fsl_flirt_linear_reg, @@ -82,22 +83,7 @@ def func_preproc_longitudinal_wf(subject_id, sub_list, config): unique_id = sub_dict["unique_id"] session_id_list.append(unique_id) - try: - creds_path = sub_dict["creds_path"] - if creds_path and "none" not in creds_path.lower(): - if os.path.exists(creds_path): - input_creds_path = os.path.abspath(creds_path) - else: - err_msg = ( - 'Credentials path: "%s" for subject "%s" was not ' - "found. Check this path and try again." - % (creds_path, subject_id) - ) - raise Exception(err_msg) - else: - input_creds_path = None - except KeyError: - input_creds_path = None + input_creds_path = check_creds_path(sub_dict.get("creds_path"), subject_id) strat = Strategy() strat_list = [strat] diff --git a/CPAC/longitudinal/wf/utils.py b/CPAC/longitudinal/wf/utils.py index 094ba09bb9..0792446537 100644 --- a/CPAC/longitudinal/wf/utils.py +++ b/CPAC/longitudinal/wf/utils.py @@ -17,12 +17,27 @@ # License along with C-PAC. If not, see . """Utilities for longitudinal workflows.""" +from pathlib import Path from typing import Optional from CPAC.pipeline import nipype_pipeline_engine as pe from CPAC.utils.interfaces.function import Function +def check_creds_path(creds_path: Optional[str], subject_id: str) -> Optional[str]: + """Check credentials path.""" + if creds_path and "none" not in creds_path.lower(): + _creds_path = Path(creds_path) + if _creds_path.exists(): + return str(_creds_path.absolute()) + err_msg = ( + 'Credentials path: "%s" for subject "%s" was not ' + "found. Check this path and try again." 
% (creds_path, subject_id) + ) + raise FileNotFoundError(err_msg) + return None + + def select_session( session: str, output_brains: list[str], warps: list[str] ) -> tuple[Optional[str], Optional[str]]: From 1a90cc0d2f2b22004975a5eb83f3e33775fae32a Mon Sep 17 00:00:00 2001 From: Jon Cluce Date: Wed, 20 Nov 2024 21:40:32 -0500 Subject: [PATCH 08/11] :necktie: Make cross-graph and cross-pool connections --- CPAC/longitudinal/robust_template.py | 17 +-- CPAC/longitudinal/wf/anat.py | 104 ++++++++++-------- CPAC/longitudinal/wf/utils.py | 47 ++++++++ .../pipeline/nipype_pipeline_engine/engine.py | 2 +- CPAC/seg_preproc/seg_preproc.py | 1 - 5 files changed, 117 insertions(+), 54 deletions(-) diff --git a/CPAC/longitudinal/robust_template.py b/CPAC/longitudinal/robust_template.py index e1c5300106..b0c6d5e817 100644 --- a/CPAC/longitudinal/robust_template.py +++ b/CPAC/longitudinal/robust_template.py @@ -113,18 +113,21 @@ def _list_outputs(self): def mri_robust_template(name: str, cfg: Configuration) -> pe.Node: """Return a Node to run `mri_robust_template` with common options.""" - node = pe.Node(RobustTemplate(), name=name) - node.set_input("mapmov", True) - node.set_input("transform_outputs", True) - node.set_input( - "average_metric", cfg["longitudinal_template_generation", "average_method"] + node = pe.Node( + RobustTemplate( + affine=cfg["longitudinal_template_generation", "dof"] == 12, # noqa: PLR2004 + average_metric=cfg["longitudinal_template_generation", "average_method"], + auto_detect_sensitivity=True, + mapmov=True, + out_file=f"{name}.nii.gz", + transform_outputs=True, + ), + name=name, ) - node.set_input("affine", cfg["longitudinal_template_generation", "dof"] == 12) # noqa: PLR2004 max_iter = cast( int | Literal["default"], cfg["longitudinal_template_generation", "max_iter"] ) if isinstance(max_iter, int): node.set_input("maxit", max_iter) - node.set_input("auto_detect_sensitivity", True) return node diff --git a/CPAC/longitudinal/wf/anat.py b/CPAC/longitudinal/wf/anat.py index 8bbfb299d4..9bcf3d15c7 100644 --- a/CPAC/longitudinal/wf/anat.py +++ b/CPAC/longitudinal/wf/anat.py @@ -25,7 +25,12 @@ from CPAC.longitudinal.preproc import subject_specific_template from CPAC.longitudinal.robust_template import mri_robust_template -from CPAC.longitudinal.wf.utils import check_creds_path, select_session_node +from CPAC.longitudinal.wf.utils import ( + check_creds_path, + cross_graph_connections, + cross_pool_resources, + select_session_node, +) from CPAC.pipeline import nipype_pipeline_engine as pe from CPAC.pipeline.cpac_pipeline import ( build_anat_preproc_stack, @@ -345,7 +350,7 @@ def anat_longitudinal_wf( for key in strats_dct.keys(): # get the outputs from run-nodes for index, data in enumerate(list(strats_dct[key])): if isinstance(data, tuple): - strats_dct[key][index] = workflow.get_output_path(*data) + strats_dct[key][index] = workflow.get_output(*data) wf = initialize_nipype_wf( config, @@ -460,24 +465,24 @@ def anat_longitudinal_wf( ], ) wf = connect_pipeline(wf, config, rpool, pipeline_blocks) - if not dry_run: wf.run() # now, just write out a copy of the above to each session config.pipeline_setup["pipeline_name"] = orig_pipe_name + longitudinal_rpool = rpool + cpr = cross_pool_resources(f"longitudinal_{subject_id}") for session in sub_list: unique_id = session["unique_id"] input_creds_path = check_creds_path(session.get("creds_path"), subject_id) - wf = initialize_nipype_wf(config, subject_id, unique_id) - - wf, rpool = initiate_rpool(wf, config, session) + ses_wf = 
initialize_nipype_wf(config, subject_id, unique_id) + ses_wf, rpool = initiate_rpool(ses_wf, config, session) config.pipeline_setup["pipeline_name"] = f"longitudinal_{orig_pipe_name}" if "derivatives_dir" in session: - rpool = ingress_output_dir( - wf, + ses_wf, rpool = ingress_output_dir( + ses_wf, config, rpool, long_id, @@ -491,32 +496,45 @@ def anat_longitudinal_wf( match config["longitudinal_template_generation", "using"]: case "C-PAC legacy": - wf.connect( - brain_template_node, - "output_brain_list", - select_sess, - "output_brains", - ) - wf.connect(brain_template_node, "warp_list", select_sess, "warps") + for input_name, output_name in [ + ("output_brains", "output_brain_list"), + ("warps", "warp_list"), + ]: + cross_graph_connections( + wf, + ses_wf, + brain_template_node, + select_sess, + output_name, + input_name, + dry_run, + ) case "mri_robust_template": - wf.connect(brain_template_node, "mapmov", select_sess, "output_brains") - wf.connect( - brain_template_node, "transform_outputs", select_sess, "warps" - ) head_select_sess = select_session_node(unique_id, "wholehead") - wf.connect( - wholehead_template_node, - "mapmov", - head_select_sess, - "output_brains", - ) - wf.connect( - wholehead_template_node, - "transform_outputs", - head_select_sess, - "warps", - ) + for input_name, output_name in [ + ("output_brains", "mapmov"), + ("warps", "transform_outputs"), + ]: + cross_graph_connections( + wf, + ses_wf, + brain_template_node, + select_sess, + output_name, + input_name, + dry_run, + ) + cross_graph_connections( + wf, + ses_wf, + wholehead_template_node, + head_select_sess, + output_name, + input_name, + dry_run, + ) + rpool.set_data( "space-longitudinal_desc-head_T1w", head_select_sess, @@ -553,29 +571,25 @@ def anat_longitudinal_wf( config.pipeline_setup["pipeline_name"] = orig_pipe_name excl = ["space-template_desc-brain_T1w", "space-T1w_desc-brain_mask"] - rpool.gather_pipes(wf, config, add_excl=excl) + rpool.gather_pipes(ses_wf, config, add_excl=excl) + cross_pool_keys = ["from-longitudinal_to-template_mode-image_xfm"] + for key in cross_pool_keys: + node, out = longitudinal_rpool.get_data(key) + cross_graph_connections(wf, ses_wf, node, cpr, out, key, dry_run) + rpool.set_data(key, cpr, key, {}, "", cpr.name) if not dry_run: - wf.run() - - # begin single-session stuff again - for session in sub_list: - unique_id = session["unique_id"] - input_creds_path = check_creds_path(session.get("creds_path"), subject_id) - - wf = initialize_nipype_wf(config, subject_id, unique_id) - - wf, rpool = initiate_rpool(wf, config, session) + ses_wf.run() pipeline_blocks = [ warp_longitudinal_T1w_to_template, warp_longitudinal_seg_to_T1w, ] - wf = connect_pipeline(wf, config, rpool, pipeline_blocks) + ses_wf = connect_pipeline(ses_wf, config, rpool, pipeline_blocks) - rpool.gather_pipes(wf, config) + rpool.gather_pipes(ses_wf, config) # this is going to run multiple times! # once for every strategy! 
if not dry_run: - wf.run() + ses_wf.run() diff --git a/CPAC/longitudinal/wf/utils.py b/CPAC/longitudinal/wf/utils.py index 0792446537..36f2e419cc 100644 --- a/CPAC/longitudinal/wf/utils.py +++ b/CPAC/longitudinal/wf/utils.py @@ -20,6 +20,8 @@ from pathlib import Path from typing import Optional +from nipype.interfaces.utility import IdentityInterface + from CPAC.pipeline import nipype_pipeline_engine as pe from CPAC.utils.interfaces.function import Function @@ -38,6 +40,43 @@ def check_creds_path(creds_path: Optional[str], subject_id: str) -> Optional[str return None +def cross_graph_connections( + wf1: pe.Workflow, + wf2: pe.Workflow, + node1: pe.Node, + node2: pe.Node, + output_name: str, + input_name: str, + dry_run: bool, +) -> None: + """Make cross-graph connections appropriate to dry-run status. + + Parameters + ---------- + wf1 + The graph that runs first + + wf2 + The graph that runs second + + node1 + The node from ``wf1`` + + node2 + The node from ``wf2`` + + output_name + The output name from ``node1`` + + input_name + The input name from ``node2`` + """ + if dry_run: + wf2.connect(node1, output_name, node2, input_name) + else: + node2.set_input(input_name, wf1.get_output(node1, output_name)) + + def select_session( session: str, output_brains: list[str], warps: list[str] ) -> tuple[Optional[str], Optional[str]]: @@ -74,3 +113,11 @@ def select_session_node(unique_id: str, suffix: str = "") -> pe.Node: ) select_sess.inputs.session = unique_id return select_sess + + +def cross_pool_resources(name: str) -> pe.Node: + """Return an IdentityInterface for cross-pool resources.""" + return pe.Node( + IdentityInterface(fields=["from-longitudinal_to-template_mode-image_xfm"]), + name=name, + ) diff --git a/CPAC/pipeline/nipype_pipeline_engine/engine.py b/CPAC/pipeline/nipype_pipeline_engine/engine.py index 2627b95d64..be72a6a080 100644 --- a/CPAC/pipeline/nipype_pipeline_engine/engine.py +++ b/CPAC/pipeline/nipype_pipeline_engine/engine.py @@ -717,7 +717,7 @@ def _get_dot( WFLOGGER.debug("cross connection: %s", dotlist[-1]) return ("\n" + prefix).join(dotlist) - def get_output_path(self, node: pe.Node, out: str) -> str: + def get_output(self, node: pe.Node, out: str) -> Any: """Get an output path from an already-run Node.""" try: _run_node: pe.Node = next( diff --git a/CPAC/seg_preproc/seg_preproc.py b/CPAC/seg_preproc/seg_preproc.py index ed99addc6e..1f85c08787 100644 --- a/CPAC/seg_preproc/seg_preproc.py +++ b/CPAC/seg_preproc/seg_preproc.py @@ -519,7 +519,6 @@ def tissue_seg_fsl_fast(wf, cfg, strat_pool, pipe_num, opt=None): # triggered by 'segments' boolean input (-g or --segments) # 'probability_maps' output is a list of individual probability maps # triggered by 'probability_maps' boolean input (-p) - segment = pe.Node( interface=fsl.FAST(), name=f"segment_{pipe_num}", From 322d660f9dc45b16f1e3657de0695136ada2e0a4 Mon Sep 17 00:00:00 2001 From: Jon Cluce Date: Thu, 21 Nov 2024 16:43:15 -0500 Subject: [PATCH 09/11] :necktie: Convert xfms from `lta` to `mat` --- CPAC/longitudinal/preproc.py | 28 ++++++------- CPAC/longitudinal/robust_template.py | 28 ++++++++++--- CPAC/longitudinal/wf/anat.py | 36 +++++++++++----- CPAC/longitudinal/wf/utils.py | 37 ++++++++++------- .../pipeline/nipype_pipeline_engine/engine.py | 41 ++++++++++++++----- 5 files changed, 113 insertions(+), 57 deletions(-) diff --git a/CPAC/longitudinal/preproc.py b/CPAC/longitudinal/preproc.py index 2dc2be6d36..a884057f41 100644 --- a/CPAC/longitudinal/preproc.py +++ b/CPAC/longitudinal/preproc.py @@ -344,11 +344,20 @@ 
def flirt_node(in_img, output_img, output_mat): return node_list +def check_convergence(mat_list, mat_type, convergence_threshold) -> bool: + """Test if every transformation matrix has reached the convergence threshold.""" + convergence_list = [ + template_convergence(mat, mat_type, convergence_threshold) for mat in mat_list + ] + return all(convergence_list) + + @Function.sig_imports( [ "from multiprocessing.pool import Pool", "from typing import Literal, Optional", "from nipype.pipeline import engine as pe", + "from CPAC.longitudinal.preproc import check_convergence", ] ) def template_creation_flirt( @@ -469,6 +478,8 @@ def template_creation_flirt( warp_list, ) + output_brain_list = list(input_brain_list) + output_skull_list = list(input_skull_list) # Chris: I added this part because it is mentioned in the paper but I actually never used it # You could run a first register_img_list() with a selected image as starting point and # give the output to this function @@ -477,18 +488,11 @@ def template_creation_flirt( output_brain_list = [node.inputs.out_file for node in init_reg] mat_list = [node.inputs.out_matrix_file for node in init_reg] warp_list = mat_list - # test if every transformation matrix has reached the convergence - convergence_list = [ - template_convergence(mat, mat_type, convergence_threshold) - for mat in mat_list - ] - converged = all(convergence_list) + converged = check_convergence(mat_list, mat_type, convergence_threshold) else: msg = "init_reg must be a list of FLIRT nipype nodes files" raise ValueError(msg) else: - output_brain_list = input_brain_list - output_skull_list = input_skull_list converged = False temporary_brain_template = os.path.join( @@ -564,13 +568,7 @@ def template_creation_flirt( warp_list[index] = warp_list_filenames[index] output_brain_list = [node.inputs.out_file for node in reg_list_node] - - # test if every transformation matrix has reached the convergence - convergence_list = [ - template_convergence(mat, mat_type, convergence_threshold) - for mat in mat_list - ] - converged = all(convergence_list) + converged = check_convergence(mat_list, mat_type, convergence_threshold) if isinstance(thread_pool, int): pool.close() diff --git a/CPAC/longitudinal/robust_template.py b/CPAC/longitudinal/robust_template.py index b0c6d5e817..e76fedda61 100644 --- a/CPAC/longitudinal/robust_template.py +++ b/CPAC/longitudinal/robust_template.py @@ -28,6 +28,7 @@ traits, ) from nipype.interfaces.freesurfer import longitudinal +from nipype.interfaces.freesurfer.utils import LTAConvert from CPAC.pipeline import nipype_pipeline_engine as pe from CPAC.utils.configuration import Configuration @@ -61,7 +62,8 @@ class RobustTemplate(longitudinal.RobustTemplate): # noqa: D101 # and this class has been changed. # CHANGES: - # * Added handling for `affind`, `mapmov` and `maxit` + # * Added handling for `affine`, `mapmov` and `maxit`. + # * Renamed transform outputs. 
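# A pure-Python sketch of the predicted-filename scheme `_list_outputs` uses
# for these renamed outputs; `predicted_names` is an illustrative helper, not
# C-PAC API. Indices are 1-based and zero-padded once there are more than 9
# inputs, so lexicographic and numeric order agree.
def predicted_names(n_files: int, prefix: str, ext: str) -> list[str]:
    fmt = "{}{:02d}.{}" if n_files > 9 else "{}{:d}.{}"
    return [fmt.format(prefix, i + 1, ext) for i in range(n_files)]

assert predicted_names(2, "space-longitudinal", "lta") == [
    "space-longitudinal1.lta",
    "space-longitudinal2.lta",
]
assert predicted_names(10, "space-longitudinal", "lta")[0] == "space-longitudinal01.lta"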
# ORIGINAL WORK'S ATTRIBUTION NOTICE: # Copyright (c) 2009-2016, Nipype developers @@ -99,7 +101,7 @@ def _list_outputs(self): n_files = len(self.inputs.in_files) fmt = "{}{:02d}.{}" if n_files > 9 else "{}{:d}.{}" # noqa: PLR2004 for key, prefix, ext in [ - ("transform_outputs", "tp", "lta"), + ("transform_outputs", "space-longitudinal", "lta"), ("scaled_intensity_outputs", "is", "txt"), ("mapmov", "space-longitudinal", "nii.gz"), ]: @@ -111,8 +113,14 @@ def _list_outputs(self): return outputs -def mri_robust_template(name: str, cfg: Configuration) -> pe.Node: - """Return a Node to run `mri_robust_template` with common options.""" +def mri_robust_template( + name: str, cfg: Configuration, num_sessions: int +) -> pe.Workflow: + """Return a subworkflow to run `mri_robust_template` with common options. + + Converts transform files to FSL format. + """ + wf = pe.Workflow(name=name) node = pe.Node( RobustTemplate( affine=cfg["longitudinal_template_generation", "dof"] == 12, # noqa: PLR2004 @@ -122,7 +130,7 @@ def mri_robust_template(name: str, cfg: Configuration) -> pe.Node: out_file=f"{name}.nii.gz", transform_outputs=True, ), - name=name, + name="mri_robust_template", ) max_iter = cast( int | Literal["default"], cfg["longitudinal_template_generation", "max_iter"] @@ -130,4 +138,12 @@ def mri_robust_template(name: str, cfg: Configuration) -> pe.Node: if isinstance(max_iter, int): node.set_input("maxit", max_iter) - return node + convert = pe.MapNode( + LTAConvert(), name="convert-to-FSL", iterfield=["in_lta", "out_fsl"] + ) + wf.connect(node, "transform_outputs", convert, "in_lta") + convert.set_input( + "out_fsl", [f"space-longitudinal{i}.mat" for i in range(num_sessions)] + ) + + return wf diff --git a/CPAC/longitudinal/wf/anat.py b/CPAC/longitudinal/wf/anat.py index 9bcf3d15c7..3d3ba5b7fb 100644 --- a/CPAC/longitudinal/wf/anat.py +++ b/CPAC/longitudinal/wf/anat.py @@ -133,7 +133,6 @@ def warp_longitudinal_T1w_to_template( "from-longitudinal_to-template_mode-image_xfm" ) reg_tool = check_prov_for_regtool(xfm_prov) - num_cpus = cfg.pipeline_setup["system_config"]["max_cores_per_participant"] num_ants_cores = cfg.pipeline_setup["system_config"]["num_ants_threads"] @@ -410,15 +409,22 @@ def anat_longitudinal_wf( wf.connect(merge_skulls, "out", wholehead_template_node, "input_skull_list") case "mri_robust_template": - brain_output = head_output = "out_file" + brain_output = head_output = "mri_robust_template.out_file" brain_template_node = mri_robust_template( - f"mri_robust_template_brain_{subject_id}", config + f"mri_robust_template_brain_{subject_id}", config, len(sub_list) ) wholehead_template_node = mri_robust_template( - f"mri_robust_template_head_{subject_id}", config + f"mri_robust_template_head_{subject_id}", config, len(sub_list) + ) + wf.connect( + merge_brains, "out", brain_template_node, "mri_robust_template.in_files" + ) + wf.connect( + merge_brains, + "out", + wholehead_template_node, + "mri_robust_template.in_files", ) - wf.connect(merge_brains, "out", brain_template_node, "in_files") - wf.connect(merge_brains, "out", wholehead_template_node, "in_files") case _: msg = ": ".join( @@ -471,8 +477,10 @@ def anat_longitudinal_wf( # now, just write out a copy of the above to each session config.pipeline_setup["pipeline_name"] = orig_pipe_name longitudinal_rpool = rpool - cpr = cross_pool_resources(f"longitudinal_{subject_id}") - for session in sub_list: + cpr = cross_pool_resources( + f"fsl_longitudinal_{subject_id}" + ) # "fsl" for check_prov_for_regtool + for i, session in 
enumerate(sub_list): unique_id = session["unique_id"] input_creds_path = check_creds_path(session.get("creds_path"), subject_id) @@ -496,6 +504,7 @@ def anat_longitudinal_wf( match config["longitudinal_template_generation", "using"]: case "C-PAC legacy": + assert isinstance(brain_template_node, pe.Node) for input_name, output_name in [ ("output_brains", "output_brain_list"), ("warps", "warp_list"), @@ -511,10 +520,15 @@ def anat_longitudinal_wf( ) case "mri_robust_template": - head_select_sess = select_session_node(unique_id, "wholehead") + assert isinstance(brain_template_node, pe.Workflow) + assert isinstance(wholehead_template_node, pe.Workflow) + index = i + 1 + head_select_sess = select_session_node(unique_id, "-wholehead") + select_sess.set_input("session", f"space-longitudinal{index}") + head_select_sess.set_input("session", f"space-longitudinal{index}") for input_name, output_name in [ - ("output_brains", "mapmov"), - ("warps", "transform_outputs"), + ("output_brains", "mri_robust_template.mapmov"), + ("warps", "convert-to-FSL_.out_fsl"), ]: cross_graph_connections( wf, diff --git a/CPAC/longitudinal/wf/utils.py b/CPAC/longitudinal/wf/utils.py index 36f2e419cc..23908c144e 100644 --- a/CPAC/longitudinal/wf/utils.py +++ b/CPAC/longitudinal/wf/utils.py @@ -18,7 +18,7 @@ """Utilities for longitudinal workflows.""" from pathlib import Path -from typing import Optional +from typing import cast, Optional from nipype.interfaces.utility import IdentityInterface @@ -43,8 +43,8 @@ def check_creds_path(creds_path: Optional[str], subject_id: str) -> Optional[str def cross_graph_connections( wf1: pe.Workflow, wf2: pe.Workflow, - node1: pe.Node, - node2: pe.Node, + node1: pe.Node | pe.Workflow, + node2: pe.Node | pe.Workflow, output_name: str, input_name: str, dry_run: bool, @@ -71,6 +71,9 @@ def cross_graph_connections( input_name The input name from ``node2`` """ + if isinstance(node1, pe.Workflow): + sub_node_name, output_name = output_name.rsplit(".", 1) + node1 = cast(pe.Node, node1.get_node(sub_node_name)) if dry_run: wf2.connect(node1, output_name, node2, input_name) else: @@ -79,17 +82,23 @@ def cross_graph_connections( def select_session( session: str, output_brains: list[str], warps: list[str] -) -> tuple[Optional[str], Optional[str]]: +) -> tuple[str, str]: """Select output brain image and warp for given session.""" - brain_path = None - warp_path = None - for brain_path in output_brains: - if f"{session}_" in brain_path: - break - for warp_path in warps: - if f"{session}_" in warp_path: - break - return brain_path, warp_path + try: + return next( + iter(brain_path for brain_path in output_brains if session in brain_path) + ), next(iter(warp_path for warp_path in warps if session in warp_path)) + except StopIteration as stop_iteration: + brain_paths_found = [ + brain_path for brain_path in output_brains if session in brain_path + ] + warps_found = [warp_path for warp_path in warps if session in warp_path] + msg = "" + if not brain_paths_found: + msg += f"{session} not found in {output_brains}.\n" + if not warps_found: + msg += f"{session} not found in {warps}.\n" + raise FileNotFoundError(msg) from stop_iteration def select_session_node(unique_id: str, suffix: str = "") -> pe.Node: @@ -111,7 +120,7 @@ def select_session_node(unique_id: str, suffix: str = "") -> pe.Node: ), name=f"longitudinal_select_FSL_{unique_id}{suffix}", ) - select_sess.inputs.session = unique_id + select_sess.set_input("session", f"{unique_id}_") return select_sess diff --git 
a/CPAC/pipeline/nipype_pipeline_engine/engine.py b/CPAC/pipeline/nipype_pipeline_engine/engine.py
index be72a6a080..455e38280c 100644
--- a/CPAC/pipeline/nipype_pipeline_engine/engine.py
+++ b/CPAC/pipeline/nipype_pipeline_engine/engine.py
@@ -56,7 +56,7 @@
 from inspect import Parameter, Signature, signature
 import os
 import re
-from typing import Any, ClassVar, Optional, TYPE_CHECKING
+from typing import Any, cast, ClassVar, Optional, TYPE_CHECKING

 from numpy import prod
 from traits.trait_base import Undefined
@@ -719,19 +719,38 @@ def _get_dot(

     def get_output(self, node: pe.Node, out: str) -> Any:
         """Get an output path from an already-run Node."""
+        result_nodes = cast(list[pe.Node], self.run(updatehash=True).nodes)
+        orig_wd = os.getcwd()
+        output = Undefined
         try:
+            # look for exact match
             _run_node: pe.Node = next(
-                iter(
-                    _
-                    for _ in self.run(updatehash=True).nodes
-                    if _.fullname == node.fullname
-                )
+                iter(_ for _ in result_nodes if _.fullname == node.fullname)
             )
-        except IndexError as index_error:
-            msg = f"Could not find {node.fullname} in {self}'s run Nodes."
-            raise LookupError(msg) from index_error
-        _res: InterfaceResult = _run_node.run()
-        return getattr(_res.outputs, out)
+        except StopIteration as stop_iteration:
+            # look for match in subgraph
+            try:
+                _run_node: pe.Node = next(
+                    iter(
+                        _
+                        for _ in result_nodes
+                        if node.fullname
+                        and _.fullname
+                        and _.fullname.endswith(node.fullname)
+                    )
+                )
+            except StopIteration:
+                msg = f"Could not find {node.fullname} in {self}'s run Nodes."
+                raise LookupError(msg) from stop_iteration
+        try:
+            os.chdir(_run_node.output_dir())
+            _res: InterfaceResult = _run_node.run()
+            output = getattr(_res.outputs, out)
+            if output is Undefined:
+                output = _run_node.interface._list_outputs().get(out, Undefined)
+        finally:
+            os.chdir(orig_wd)
+        return output

     def _handle_just_in_time_exception(self, node):
         # pylint: disable=protected-access

From 342dae24bf53ca4287dd931913d77e866e784b86 Mon Sep 17 00:00:00 2001
From: Jon Cluce
Date: Fri, 22 Nov 2024 23:32:18 -0500
Subject: [PATCH 10/11] :necktie: Clarify longitudinal `xfm`s vs longitudinal `warp`s

---
 CPAC/longitudinal/robust_template.py |  18 ++++-
 CPAC/longitudinal/wf/anat.py         | 117 +++++++++++++++++----------
 CPAC/longitudinal/wf/utils.py        |  53 +++++++++---
 CPAC/registration/registration.py    |   8 +-
 CPAC/utils/utils.py                  |  14 ++--
 5 files changed, 148 insertions(+), 62 deletions(-)

diff --git a/CPAC/longitudinal/robust_template.py b/CPAC/longitudinal/robust_template.py
index e76fedda61..68e9a64103 100644
--- a/CPAC/longitudinal/robust_template.py
+++ b/CPAC/longitudinal/robust_template.py
@@ -28,6 +28,7 @@
     traits,
 )
 from nipype.interfaces.freesurfer import longitudinal
+from nipype.interfaces.freesurfer.preprocess import MRIConvert
 from nipype.interfaces.freesurfer.utils import LTAConvert

 from CPAC.pipeline import nipype_pipeline_engine as pe
 from CPAC.utils.configuration import Configuration
@@ -51,7 +52,7 @@ class RobustTemplateInputSpec(longitudinal.RobustTemplateInputSpec):  # noqa: D1

 class RobustTemplateOutputSpec(longitudinal.RobustTemplateOutputSpec):  # noqa: D101
     mapmov = OutputMultiPath(
-        File(exists=True),
+        File(),
         desc="each input mapped and resampled to longitudinal template",
     )

@@ -127,7 +128,7 @@ def mri_robust_template(
             average_metric=cfg["longitudinal_template_generation", "average_method"],
             auto_detect_sensitivity=True,
             mapmov=True,
-            out_file=f"{name}.nii.gz",
+            out_file=f"{name}.mgz",
             transform_outputs=True,
         ),
         name="mri_robust_template",
@@ -138,12 +139,23 @@ def mri_robust_template(
     if isinstance(max_iter, int):
node.set_input("maxit", max_iter) + nifti_template = pe.Node(MRIConvert(out_type="niigz"), name="NIfTI-template") + wf.connect(node, "out_file", nifti_template, "in_file") + + nifti_outputs = pe.MapNode( + MRIConvert(), name="NIfTI-mapmov", iterfield=["in_file", "out_file"] + ) + wf.connect(node, "mapmov", nifti_outputs, "in_file") + nifti_outputs.set_input( + "out_file", [f"space-longitudinal{i + 1}.nii.gz" for i in range(num_sessions)] + ) + convert = pe.MapNode( LTAConvert(), name="convert-to-FSL", iterfield=["in_lta", "out_fsl"] ) wf.connect(node, "transform_outputs", convert, "in_lta") convert.set_input( - "out_fsl", [f"space-longitudinal{i}.mat" for i in range(num_sessions)] + "out_fsl", [f"space-longitudinal{i + 1}.mat" for i in range(num_sessions)] ) return wf diff --git a/CPAC/longitudinal/wf/anat.py b/CPAC/longitudinal/wf/anat.py index 3d3ba5b7fb..81fcc4f848 100644 --- a/CPAC/longitudinal/wf/anat.py +++ b/CPAC/longitudinal/wf/anat.py @@ -19,6 +19,7 @@ from typing import cast, Optional +from networkx.classes.digraph import DiGraph from nipype import config as nipype_config from nipype.interfaces import fsl from nipype.interfaces.utility import Merge @@ -28,7 +29,7 @@ from CPAC.longitudinal.wf.utils import ( check_creds_path, cross_graph_connections, - cross_pool_resources, + get_output_from_graph, select_session_node, ) from CPAC.pipeline import nipype_pipeline_engine as pe @@ -191,17 +192,19 @@ def warp_longitudinal_T1w_to_template( ), "T1w-brain-template", ], - outputs=[ - "label-CSF_mask", - "label-GM_mask", - "label-WM_mask", - "label-CSF_desc-preproc_mask", - "label-GM_desc-preproc_mask", - "label-WM_desc-preproc_mask", - "label-CSF_probseg", - "label-GM_probseg", - "label-WM_probseg", - ], + outputs={ + "from-longitudinal_to-T1w_mode-image_desc-linear_xfm": {}, + "from-longitudinal_to-T1w_mode-image_desc-linear_warp": {}, + "label-CSF_mask": {}, + "label-GM_mask": {}, + "label-WM_mask": {}, + "label-CSF_desc-preproc_mask": {}, + "label-GM_desc-preproc_mask": {}, + "label-WM_desc-preproc_mask": {}, + "label-CSF_probseg": {}, + "label-GM_probseg": {}, + "label-WM_probseg": {}, + }, ) def warp_longitudinal_seg_to_T1w( wf: pe.Workflow, @@ -211,6 +214,7 @@ def warp_longitudinal_seg_to_T1w( opt: Optional[str] = None, ) -> NODEBLOCK_RETURN: """Transform anatomical images from longitudinal space template space.""" + outputs = {} if strat_pool.check_rpool("from-longitudinal_to-T1w_mode-image_desc-linear_xfm"): xfm_prov = strat_pool.get_cpac_provenance( "from-longitudinal_to-T1w_mode-image_desc-linear_xfm" @@ -233,13 +237,21 @@ def warp_longitudinal_seg_to_T1w( "in_file", ) xfm = (invt, "out_file") + outputs["from-longitudinal_to-T1w_mode-image_desc-linear_xfm"] = xfm + if reg_tool != "fsl": + msg = f"`warp_longitudinal_seg_to_T1w` not yet implemented for {reg_tool}." 
+ raise NotImplementedError(msg) + warp = pe.Node( + fsl.ConvertWarp(relwarp=True, out_relwarp=True), name=f"convert_warp_{pipe_num}" + ) + wf.connect(*xfm, warp, "postmat") + wf.connect( + *strat_pool.get_data("space-longitudinal_desc-brain_T1w"), warp, "reference" + ) + outputs["from-longitudinal_to-T1w_mode-image_desc-linear_warp"] = warp, "out_file" num_cpus = cfg.pipeline_setup["system_config"]["max_cores_per_participant"] - num_ants_cores = cfg.pipeline_setup["system_config"]["num_ants_threads"] - - outputs = {} - labels = [ "CSF_mask", "CSF_desc-preproc_mask", @@ -251,7 +263,6 @@ def warp_longitudinal_seg_to_T1w( "WM_desc-preproc_mask", "WM_probseg", ] - for label in labels: apply_xfm = apply_transform( f"warp_longitudinal_seg_to_T1w_{label}_{pipe_num}", @@ -276,11 +287,10 @@ def warp_longitudinal_seg_to_T1w( node, out = strat_pool.get_data("T1w-brain-template") wf.connect(node, out, apply_xfm, "inputspec.reference") - wf.connect(*xfm, apply_xfm, "inputspec.transform") - + wf.connect(warp, "out_file", apply_xfm, "inputspec.transform") outputs[f"label-{label}"] = (apply_xfm, "outputspec.output_image") - return (wf, outputs) + return wf, outputs def anat_longitudinal_wf( @@ -345,11 +355,13 @@ def anat_longitudinal_wf( for key in strats_dct.keys(): strats_dct[key].append(cast(tuple[pe.Node, str], rpool.get_data(key))) if not dry_run: - workflow.run() + workflow_graph: DiGraph = workflow.run() for key in strats_dct.keys(): # get the outputs from run-nodes for index, data in enumerate(list(strats_dct[key])): if isinstance(data, tuple): - strats_dct[key][index] = workflow.get_output(*data) + strats_dct[key][index] = get_output_from_graph( + workflow_graph, *data + ) wf = initialize_nipype_wf( config, @@ -409,7 +421,7 @@ def anat_longitudinal_wf( wf.connect(merge_skulls, "out", wholehead_template_node, "input_skull_list") case "mri_robust_template": - brain_output = head_output = "mri_robust_template.out_file" + brain_output = head_output = "NIfTI-template.out_file" brain_template_node = mri_robust_template( f"mri_robust_template_brain_{subject_id}", config, len(sub_list) ) @@ -420,7 +432,7 @@ def anat_longitudinal_wf( merge_brains, "out", brain_template_node, "mri_robust_template.in_files" ) wf.connect( - merge_brains, + merge_skulls, "out", wholehead_template_node, "mri_robust_template.in_files", @@ -471,15 +483,14 @@ def anat_longitudinal_wf( ], ) wf = connect_pipeline(wf, config, rpool, pipeline_blocks) - if not dry_run: - wf.run() + + wf_graph: DiGraph | pe.Workflow = ( + cast(DiGraph, wf.run()) if not dry_run else cast(pe.Workflow, wf) + ) # now, just write out a copy of the above to each session config.pipeline_setup["pipeline_name"] = orig_pipe_name longitudinal_rpool = rpool - cpr = cross_pool_resources( - f"fsl_longitudinal_{subject_id}" - ) # "fsl" for check_prov_for_regtool for i, session in enumerate(sub_list): unique_id = session["unique_id"] input_creds_path = check_creds_path(session.get("creds_path"), subject_id) @@ -504,49 +515,61 @@ def anat_longitudinal_wf( match config["longitudinal_template_generation", "using"]: case "C-PAC legacy": - assert isinstance(brain_template_node, pe.Node) + cross_graph_connections( + wf_graph, + ses_wf, + merge_brains, + brain_template_node, + "out", + "input_brain_list", + ) + cross_graph_connections( + wf_graph, + ses_wf, + merge_skulls, + brain_template_node, + "out", + "input_skull_list", + ) for input_name, output_name in [ ("output_brains", "output_brain_list"), ("warps", "warp_list"), ]: cross_graph_connections( - wf, + wf_graph, 
ses_wf, brain_template_node, select_sess, output_name, input_name, - dry_run, ) case "mri_robust_template": assert isinstance(brain_template_node, pe.Workflow) assert isinstance(wholehead_template_node, pe.Workflow) index = i + 1 - head_select_sess = select_session_node(unique_id, "-wholehead") + head_select_sess = select_session_node(unique_id, "wholehead") select_sess.set_input("session", f"space-longitudinal{index}") head_select_sess.set_input("session", f"space-longitudinal{index}") for input_name, output_name in [ - ("output_brains", "mri_robust_template.mapmov"), + ("output_brains", "NIfTI-mapmov_.out_file"), ("warps", "convert-to-FSL_.out_fsl"), ]: cross_graph_connections( - wf, + wf_graph, ses_wf, brain_template_node, select_sess, output_name, input_name, - dry_run, ) cross_graph_connections( - wf, + wf_graph, ses_wf, wholehead_template_node, head_select_sess, output_name, input_name, - dry_run, ) rpool.set_data( @@ -589,8 +612,20 @@ def anat_longitudinal_wf( cross_pool_keys = ["from-longitudinal_to-template_mode-image_xfm"] for key in cross_pool_keys: node, out = longitudinal_rpool.get_data(key) - cross_graph_connections(wf, ses_wf, node, cpr, out, key, dry_run) - rpool.set_data(key, cpr, key, {}, "", cpr.name) + try: + json_info: dict = longitudinal_rpool.get_json( + key, next(iter(longitudinal_rpool.rpool[key].keys())) + ) + except (AttributeError, KeyError, StopIteration): + json_info = {} + rpool.set_data( + key, + node, + out, + json_info, + "", + f"fsl_longitudinal_{subject_id}", # "fsl" for check_prov_for_regtool + ) if not dry_run: ses_wf.run() @@ -605,5 +640,5 @@ def anat_longitudinal_wf( # this is going to run multiple times! # once for every strategy! - if not dry_run: + if not dry_run: # check select_sess ses_wf.run() diff --git a/CPAC/longitudinal/wf/utils.py b/CPAC/longitudinal/wf/utils.py index 23908c144e..ce5f5fd9fe 100644 --- a/CPAC/longitudinal/wf/utils.py +++ b/CPAC/longitudinal/wf/utils.py @@ -18,8 +18,9 @@ """Utilities for longitudinal workflows.""" from pathlib import Path -from typing import cast, Optional +from typing import Any, cast, Optional +from networkx.classes.digraph import DiGraph from nipype.interfaces.utility import IdentityInterface from CPAC.pipeline import nipype_pipeline_engine as pe @@ -40,21 +41,27 @@ def check_creds_path(creds_path: Optional[str], subject_id: str) -> Optional[str return None +@Function.sig_imports( + [ + "from networkx.classes.digraph import DiGraph", + "from CPAC.pipeline import nipype_pipeline_engine as pe", + "from CPAC.longitudinal.wf.utils import get_output_from_graph", + ] +) def cross_graph_connections( - wf1: pe.Workflow, + wf1: DiGraph | pe.Workflow, wf2: pe.Workflow, node1: pe.Node | pe.Workflow, node2: pe.Node | pe.Workflow, output_name: str, input_name: str, - dry_run: bool, ) -> None: """Make cross-graph connections appropriate to dry-run status. 
Parameters
----------
wf1
- The graph that runs first
+ The executed graph of the workflow that already ran, or that workflow itself on a dry run
wf2
The graph that runs second
@@ -71,13 +78,15 @@
input_name
The input name from ``node2``
"""
- if isinstance(node1, pe.Workflow):
- sub_node_name, output_name = output_name.rsplit(".", 1)
- node1 = cast(pe.Node, node1.get_node(sub_node_name))
- if dry_run:
+ if isinstance(wf1, pe.Workflow):  # dry run
+ if isinstance(node1, pe.Workflow):
+ sub_node_name, output_name = output_name.rsplit(".", 1)
+ node1 = cast(pe.Node, node1.get_node(sub_node_name))
wf2.connect(node1, output_name, node2, input_name)
else:
- node2.set_input(input_name, wf1.get_output(node1, output_name))
+ setattr(
+ node2.inputs, input_name, get_output_from_graph(wf1, node1, output_name)
+ )
def select_session(
@@ -130,3 +139,29 @@
IdentityInterface(fields=["from-longitudinal_to-template_mode-image_xfm"]),
name=name,
)
+
+
+def get_output_from_graph(
+ graph: DiGraph, node: pe.Node | pe.Workflow, output_name: str
+) -> Any:
+ """Get an output from a graph that has been run."""
+ nodename = str(node.fullname)
+ if isinstance(node, pe.Workflow):
+ sub_node_name, output_name = output_name.rsplit(".", 1)
+ nodename = f"{nodename}.{sub_node_name}"
+ try:
+ output = getattr(
+ next(
+ iter(
+ _node
+ for _node in graph
+ if _node.fullname.endswith(nodename)
+ or _node.fullname.endswith(f"{nodename}_")
+ )
+ ).result.outputs,
+ output_name,
+ )
+ except StopIteration as stop_iteration:
+ msg = f"{nodename} not found in completed workflow."
+ raise FileNotFoundError(msg) from stop_iteration
+ return output
diff --git a/CPAC/registration/registration.py b/CPAC/registration/registration.py
index b8456393d4..a2e62790bd 100644
--- a/CPAC/registration/registration.py
+++ b/CPAC/registration/registration.py
@@ -2338,15 +2338,15 @@ def register_FSL_anat_to_template(wf, cfg, strat_pool, pipe_num, opt=None):
wf.connect(node, out, fsl, "inputspec.reference_mask")
if "space-longitudinal" in brain:
- for key in outputs.keys():
+ for key in list(outputs.keys()):
if "from-T1w" in key:
new_key = key.replace("from-T1w", "from-longitudinal")
outputs[new_key] = outputs[key]
- del outputs[key]
+ # del outputs[key]
if "to-T1w" in key:
new_key = key.replace("to-T1w", "to-longitudinal")
outputs[new_key] = outputs[key]
- del outputs[key]
+ # del outputs[key]
return (wf, outputs)
@@ -2429,7 +2429,7 @@ def register_symmetric_FSL_anat_to_template(wf, cfg, strat_pool, pipe_num, opt=N
wf.connect(node, out, fsl, "inputspec.reference_mask")
if "space-longitudinal" in brain:
- for key in outputs.keys():
+ for key in list(outputs.keys()):
if "from-T1w" in key:
new_key = key.replace("from-T1w", "from-longitudinal")
outputs[new_key] = outputs[key]
diff --git a/CPAC/utils/utils.py b/CPAC/utils/utils.py
index b459262993..8f2810e566 100644
--- a/CPAC/utils/utils.py
+++ b/CPAC/utils/utils.py
@@ -159,6 +159,7 @@ def create_id_string(
fwhm=None,
subdir=None,
extension=None,
+ subject_level: bool = False,
):
"""Create the unique key-value identifier string for BIDS-Derivatives file names.
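How the two modes of cross_graph_connections introduced above are meant to compose with get_output_from_graph, as a minimal, self-contained sketch (the toy workflows, node names, and base_dir below are hypothetical, not taken from this patch):

from nipype.interfaces.utility import Function

from CPAC.longitudinal.wf.utils import cross_graph_connections
from CPAC.pipeline import nipype_pipeline_engine as pe


def _source() -> int:
    # stand-in for a real upstream interface
    return 1


def _sink(x: int) -> int:
    # stand-in for a real downstream interface
    return x


first = pe.Workflow(name="first", base_dir="/tmp/cgc_sketch")
src = pe.Node(Function(output_names=["out"], function=_source), name="src")
first.add_nodes([src])

second = pe.Workflow(name="second", base_dir="/tmp/cgc_sketch")
dst = pe.Node(
    Function(input_names=["x"], output_names=["out"], function=_sink), name="dst"
)
second.add_nodes([dst])

dry_run = False  # assumption for this sketch
if dry_run:
    # `first` has not executed: passing the Workflow itself creates a symbolic
    # wf2.connect(...) edge between the two graphs.
    cross_graph_connections(first, second, src, dst, "out", "x")
else:
    # `first` already ran: the concrete value of src.out is looked up in the
    # executed DiGraph via get_output_from_graph and set on dst.inputs.x.
    cross_graph_connections(first.run(), second, src, dst, "out", "x")
second.run()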
@@ -190,12 +191,15 @@ def create_id_string( ses_id = unique_id.split("_")[1] if "sub-" not in part_id: part_id = f"sub-{part_id}" - if "ses-" not in ses_id: - ses_id = f"ses-{ses_id}" - if scan_id: - out_filename = f"{part_id}_{ses_id}_task-{scan_id}_{resource}" + if subject_level: + out_filename = f"{part_id}_{resource}" else: - out_filename = f"{part_id}_{ses_id}_{resource}" + if "ses-" not in ses_id: + ses_id = f"ses-{ses_id}" + if scan_id: + out_filename = f"{part_id}_{ses_id}_task-{scan_id}_{resource}" + else: + out_filename = f"{part_id}_{ses_id}_{resource}" template_tag = template_desc.split(" -")[0] if template_desc else "*" for prefix in ["space-", "from-", "to-"]: From 7ce9361aa62ca72e6501162e91b51faa41647e55 Mon Sep 17 00:00:00 2001 From: Jon Cluce Date: Mon, 25 Nov 2024 14:24:46 -0500 Subject: [PATCH 11/11] :recycle: Differentiate longitudinal template images from session images in longitudinal template space --- CPAC/longitudinal/wf/anat.py | 82 ++++++++++++++++--------------- CPAC/longitudinal/wf/utils.py | 9 ---- CPAC/pipeline/engine.py | 15 ++++-- CPAC/registration/registration.py | 77 ++++++++++++++++++++++------- CPAC/resources/cpac_outputs.tsv | 6 +++ CPAC/seg_preproc/seg_preproc.py | 18 +++++-- CPAC/utils/utils.py | 29 ++++++----- 7 files changed, 152 insertions(+), 84 deletions(-) diff --git a/CPAC/longitudinal/wf/anat.py b/CPAC/longitudinal/wf/anat.py index 81fcc4f848..c68048ac77 100644 --- a/CPAC/longitudinal/wf/anat.py +++ b/CPAC/longitudinal/wf/anat.py @@ -106,7 +106,12 @@ def mask_longitudinal_T1w_brain( node, out = strat_pool.get_data("space-longitudinal_desc-brain_T1w") wf.connect(node, out, brain_mask, "in_file") - outputs = {"space-longitudinal_desc-brain_mask": (brain_mask, "out_file")} + outputs = { + "space-longitudinal_desc-brain_mask": ( + brain_mask, + "out_file", + ) + } return (wf, outputs) @@ -115,16 +120,14 @@ def mask_longitudinal_T1w_brain( name="warp_longitudinal_T1w_to_template", config=["longitudinal_template_generation"], switch=["run"], - option_key="using", - option_val="C-PAC legacy", inputs=[ ( - "space-longitudinal_desc-brain_T1w", + "longitudinal-template_space-longitudinal_desc-brain_T1w", "from-longitudinal_to-template_mode-image_xfm", ), "T1w-brain-template", ], - outputs=["space-template_desc-brain_T1w"], + outputs=["longitudinal-template_space-template_desc-brain_T1w"], ) def warp_longitudinal_T1w_to_template( wf, cfg, strat_pool, pipe_num, opt=None @@ -139,7 +142,7 @@ def warp_longitudinal_T1w_to_template( num_ants_cores = cfg.pipeline_setup["system_config"]["num_ants_threads"] apply_xfm = apply_transform( - f"warp_longitudinal_to_T1template_{pipe_num}", + f"warp_longitudinal_to_template_{pipe_num}", reg_tool, time_series=False, num_cpus=num_cpus, @@ -155,7 +158,9 @@ def warp_longitudinal_T1w_to_template( "anatomical_registration" ]["registration"]["FSL-FNIRT"]["interpolation"] - node, out = strat_pool.get_data("space-longitudinal_desc-brain_T1w") + node, out = strat_pool.get_data( + "longitudinal-template_space-longitudinal_desc-brain_T1w" + ) wf.connect(node, out, apply_xfm, "inputspec.input_image") node, out = strat_pool.get_data("T1w-brain-template") @@ -164,9 +169,14 @@ def warp_longitudinal_T1w_to_template( node, out = strat_pool.get_data("from-longitudinal_to-template_mode-image_xfm") wf.connect(node, out, apply_xfm, "inputspec.transform") - outputs = {"space-template_desc-brain_T1w": (apply_xfm, "outputspec.output_image")} + outputs = { + "longitudinal-template_space-template_desc-brain_T1w": ( + apply_xfm, + 
"outputspec.output_image", + ) + } - return (wf, outputs) + return wf, outputs @nodeblock( @@ -213,7 +223,7 @@ def warp_longitudinal_seg_to_T1w( pipe_num: int, opt: Optional[str] = None, ) -> NODEBLOCK_RETURN: - """Transform anatomical images from longitudinal space template space.""" + """Transform anatomical segmentation from longitudinal template to T1w space.""" outputs = {} if strat_pool.check_rpool("from-longitudinal_to-T1w_mode-image_desc-linear_xfm"): xfm_prov = strat_pool.get_cpac_provenance( @@ -229,7 +239,7 @@ def warp_longitudinal_seg_to_T1w( ) reg_tool = check_prov_for_regtool(xfm_prov) # create inverse xfm if we don't have it - invt = pe.Node(interface=fsl.ConvertXFM(), name="convert_xfm") + invt = pe.Node(interface=fsl.ConvertXFM(), name=f"convert_xfm_{pipe_num}") invt.inputs.invert_xfm = True wf.connect( *strat_pool.get_data("from-T1w_to-longitudinal_mode-image_desc-linear_xfm"), @@ -246,7 +256,9 @@ def warp_longitudinal_seg_to_T1w( ) wf.connect(*xfm, warp, "postmat") wf.connect( - *strat_pool.get_data("space-longitudinal_desc-brain_T1w"), warp, "reference" + *strat_pool.get_data("space-longitudinal_desc-brain_T1w"), + warp, + "reference", ) outputs["from-longitudinal_to-T1w_mode-image_desc-linear_warp"] = warp, "out_file" @@ -369,8 +381,6 @@ def anat_longitudinal_wf( name="template_node_brain", ) - config.pipeline_setup["pipeline_name"] = f"longitudinal_{orig_pipe_name}" - num_sessions = len(strats_dct["desc-brain_T1w"]) merge_brains = pe.Node(Merge(num_sessions), name="merge_brains") merge_skulls = pe.Node(Merge(num_sessions), name="merge_skulls") @@ -381,9 +391,9 @@ def anat_longitudinal_wf( ) wf._connect_node_or_path_for_merge(merge_skulls, strats_dct, "desc-head_T1w", i) - long_id = f"longitudinal_{subject_id}_strat-desc-brain_T1w" + long_id = f"{subject_id}_desc-brain_T1w" - wf, rpool = initiate_rpool(wf, config, part_id=long_id) + wf, rpool = initiate_rpool(wf, config, part_id=subject_id) match config["longitudinal_template_generation", "using"]: case "C-PAC legacy": @@ -447,9 +457,9 @@ def anat_longitudinal_wf( ) raise ValueError(msg) - for suffix in ["", "-template"]: + for prefix in ["", "longitudinal-template_"]: rpool.set_data( - f"space-longitudinal_desc-brain_T1w{suffix}", + f"{prefix}space-longitudinal_desc-brain_T1w", brain_template_node, brain_output, {}, @@ -459,7 +469,7 @@ def anat_longitudinal_wf( for desc in ["head", "reorient"]: rpool.set_data( - f"space-longitudinal_desc-{desc}_T1w{suffix}", + f"{prefix}space-longitudinal_desc-{desc}_T1w", wholehead_template_node, head_output, {}, @@ -471,15 +481,17 @@ def anat_longitudinal_wf( pipeline_blocks = build_T1w_registration_stack( rpool, config, pipeline_blocks, space="longitudinal" ) - pipeline_blocks = build_segmentation_stack(rpool, config, pipeline_blocks) + + pipeline_blocks += [warp_longitudinal_T1w_to_template] rpool.gather_pipes( wf, config, add_excl=[ - "space-longitudinal_desc-brain_T1w", - "space-longitudinal_desc-reorient_T1w", - "space-longitudinal_desc-brain_mask", + "longitudinal-template_space-longitudinal_desc-brain_T1w", + "longitudinal-template_space-longitudinal_desc-head_T1w", + "longitudinal-template_space-longitudinal_desc-reorient_T1w", + "longitudinal-template_space-longitudinal_desc-brain_mask", ], ) wf = connect_pipeline(wf, config, rpool, pipeline_blocks) @@ -498,7 +510,7 @@ def anat_longitudinal_wf( ses_wf = initialize_nipype_wf(config, subject_id, unique_id) ses_wf, rpool = initiate_rpool(ses_wf, config, session) - config.pipeline_setup["pipeline_name"] = 
f"longitudinal_{orig_pipe_name}" + if "derivatives_dir" in session: ses_wf, rpool = ingress_output_dir( ses_wf, @@ -580,14 +592,6 @@ def anat_longitudinal_wf( "", head_select_sess.name, ) - rpool.set_data( - "from-T1w_to-longitudinal_mode-image_desc-linear_xfm", - head_select_sess, - "warp_path", - {}, - "", - head_select_sess.name, - ) rpool.set_data( "space-longitudinal_desc-brain_T1w", @@ -609,7 +613,11 @@ def anat_longitudinal_wf( config.pipeline_setup["pipeline_name"] = orig_pipe_name excl = ["space-template_desc-brain_T1w", "space-T1w_desc-brain_mask"] rpool.gather_pipes(ses_wf, config, add_excl=excl) - cross_pool_keys = ["from-longitudinal_to-template_mode-image_xfm"] + cross_pool_keys = [ + "from-longitudinal_to-template_mode-image_xfm", + "space-longitudinal_desc-brain_T1w", + "space-longitudinal_desc-reorient_T1w", + ] for key in cross_pool_keys: node, out = longitudinal_rpool.get_data(key) try: @@ -626,13 +634,9 @@ def anat_longitudinal_wf( "", f"fsl_longitudinal_{subject_id}", # "fsl" for check_prov_for_regtool ) - if not dry_run: - ses_wf.run() - pipeline_blocks = [ - warp_longitudinal_T1w_to_template, - warp_longitudinal_seg_to_T1w, - ] + pipeline_blocks += [warp_longitudinal_seg_to_T1w] + pipeline_blocks = build_segmentation_stack(rpool, config, pipeline_blocks) ses_wf = connect_pipeline(ses_wf, config, rpool, pipeline_blocks) diff --git a/CPAC/longitudinal/wf/utils.py b/CPAC/longitudinal/wf/utils.py index ce5f5fd9fe..74bbc520bb 100644 --- a/CPAC/longitudinal/wf/utils.py +++ b/CPAC/longitudinal/wf/utils.py @@ -21,7 +21,6 @@ from typing import Any, cast, Optional from networkx.classes.digraph import DiGraph -from nipype.interfaces.utility import IdentityInterface from CPAC.pipeline import nipype_pipeline_engine as pe from CPAC.utils.interfaces.function import Function @@ -133,14 +132,6 @@ def select_session_node(unique_id: str, suffix: str = "") -> pe.Node: return select_sess -def cross_pool_resources(name: str) -> pe.Node: - """Return an IdentityInterface for cross-pool resources.""" - return pe.Node( - IdentityInterface(fields=["from-longitudinal_to-template_mode-image_xfm"]), - name=name, - ) - - def get_output_from_graph( graph: DiGraph, node: pe.Node | pe.Workflow, output_name: str ) -> Any: diff --git a/CPAC/pipeline/engine.py b/CPAC/pipeline/engine.py index 8b9b8315af..cf4d84ec15 100644 --- a/CPAC/pipeline/engine.py +++ b/CPAC/pipeline/engine.py @@ -152,6 +152,9 @@ def _set_id_parts(self) -> None: """Set part_id and ses_id.""" unique_id = self.name setattr(self, "_part_id", unique_id.split("_")[0]) + if "_" not in unique_id: + setattr(self, "_ses_id", None) + return ses_id = unique_id.split("_")[1] if "ses-" not in ses_id: ses_id = f"ses-{ses_id}" @@ -1156,9 +1159,15 @@ def gather_pipes(self, wf, cfg, all=False, add_incl=None, add_excl=None): for pipe_idx in self.rpool[resource]: out_dir = cfg.pipeline_setup["output_directory"]["path"] pipe_name = cfg.pipeline_setup["pipeline_name"] - container = os.path.join( - f"pipeline_{pipe_name}", self.part_id, self.ses_id - ) + if self.ses_id: + container = os.path.join( + f"pipeline_{pipe_name}", self.part_id, self.ses_id + ) + else: + container = os.path.join(f"pipeline_{pipe_name}", self.part_id) + resource_name = self.get_name() + if resource_name.startswith("longitudinal-template_"): + resource_name = resource_name[22:] filename = f"{self.get_name()}_{res_in_filename(self.cfg, resource)}" out_path = os.path.join(out_dir, container, subdir, filename) diff --git a/CPAC/registration/registration.py 
b/CPAC/registration/registration.py index a2e62790bd..b07a743fff 100644 --- a/CPAC/registration/registration.py +++ b/CPAC/registration/registration.py @@ -2264,8 +2264,14 @@ def bold_to_T1template_xfm_connector( option_val=["FSL", "FSL-linear"], inputs=[ ( - ["desc-preproc_T1w", "space-longitudinal_desc-reorient_T1w"], - ["desc-brain_T1w", "space-longitudinal_desc-brain_T1w"], + [ + "desc-preproc_T1w", + "space-longitudinal_desc-reorient_T1w", + ], + [ + "desc-brain_T1w", + "space-longitudinal_desc-brain_T1w", + ], ), "T1w-template", "T1w-brain-template", @@ -2306,7 +2312,8 @@ def register_FSL_anat_to_template(wf, cfg, strat_pool, pipe_num, opt=None): ]["registration"]["FSL-FNIRT"]["fnirt_config"] connect, brain = strat_pool.get_data( - ["desc-brain_T1w", "space-longitudinal_desc-brain_T1w"], report_fetched=True + ["desc-brain_T1w", "space-longitudinal_desc-brain_T1w"], + report_fetched=True, ) node, out = connect wf.connect(node, out, fsl, "inputspec.input_brain") @@ -2330,7 +2337,10 @@ def register_FSL_anat_to_template(wf, cfg, strat_pool, pipe_num, opt=None): wf.connect(node, out, fsl, "inputspec.reference_head") node, out = strat_pool.get_data( - ["desc-preproc_T1w", "space-longitudinal_desc-reorient_T1w"] + [ + "desc-preproc_T1w", + "space-longitudinal_desc-reorient_T1w", + ] ) wf.connect(node, out, fsl, "inputspec.input_head") @@ -2348,7 +2358,7 @@ def register_FSL_anat_to_template(wf, cfg, strat_pool, pipe_num, opt=None): outputs[new_key] = outputs[key] # del outputs[key] - return (wf, outputs) + return wf, outputs @nodeblock( @@ -2359,8 +2369,14 @@ def register_FSL_anat_to_template(wf, cfg, strat_pool, pipe_num, opt=None): option_val=["FSL", "FSL-linear"], inputs=[ ( - ["desc-preproc_T1w", "space-longitudinal_desc-reorient_T1w"], - ["desc-brain_T1w", "space-longitudinal_desc-brain_T1w"], + [ + "desc-preproc_T1w", + "space-longitudinal_desc-reorient_T1w", + ], + [ + "desc-brain_T1w", + "space-longitudinal_desc-brain_T1w", + ], ), "T1w-template-symmetric", "T1w-brain-template-symmetric", @@ -2409,7 +2425,8 @@ def register_symmetric_FSL_anat_to_template(wf, cfg, strat_pool, pipe_num, opt=N ]["registration"]["FSL-FNIRT"]["fnirt_config"] connect, brain = strat_pool.get_data( - ["desc-brain_T1w", "space-longitudinal_desc-brain_T1w"], report_fetched=True + ["desc-brain_T1w", "space-longitudinal_desc-brain_T1w"], + report_fetched=True, ) node, out = connect wf.connect(node, out, fsl, "inputspec.input_brain") @@ -2418,7 +2435,10 @@ def register_symmetric_FSL_anat_to_template(wf, cfg, strat_pool, pipe_num, opt=N wf.connect(node, out, fsl, "inputspec.reference_brain") node, out = strat_pool.get_data( - ["desc-preproc_T1w", "space-longitudinal_desc-reorient_T1w"] + [ + "desc-preproc_T1w", + "space-longitudinal_desc-reorient_T1w", + ] ) wf.connect(node, out, fsl, "inputspec.input_head") @@ -2508,7 +2528,10 @@ def register_FSL_EPI_to_template(wf, cfg, strat_pool, pipe_num, opt=None): option_val="ANTS", inputs=[ ( - ["desc-preproc_T1w", "space-longitudinal_desc-brain_T1w"], + [ + "desc-preproc_T1w", + "space-longitudinal_desc-brain_T1w", + ], [ "space-T1w_desc-brain_mask", "space-longitudinal_desc-brain_mask", @@ -2616,7 +2639,8 @@ def register_ANTs_anat_to_template(wf, cfg, strat_pool, pipe_num, opt=None): ]["registration"]["ANTs"]["interpolation"] connect, brain = strat_pool.get_data( - ["desc-preproc_T1w", "space-longitudinal_desc-brain_T1w"], report_fetched=True + ["desc-preproc_T1w", "space-longitudinal_desc-brain_T1w"], + report_fetched=True, ) node, out = connect wf.connect(node, out, 
ants_rc, "inputspec.input_brain") @@ -2669,7 +2693,7 @@ def register_ANTs_anat_to_template(wf, cfg, strat_pool, pipe_num, opt=None): ) outputs[new_key] = outputs[key] - return (wf, outputs) + return wf, outputs @nodeblock( @@ -2680,8 +2704,14 @@ def register_ANTs_anat_to_template(wf, cfg, strat_pool, pipe_num, opt=None): option_val="ANTS", inputs=[ ( - ["desc-preproc_T1w", "space-longitudinal_desc-brain_T1w"], - ["space-T1w_desc-brain_mask", "space-longitudinal_desc-brain_mask"], + [ + "desc-preproc_T1w", + "space-longitudinal_desc-brain_T1w", + ], + [ + "space-T1w_desc-brain_mask", + "space-longitudinal_desc-brain_mask", + ], [ "desc-head_T1w", "desc-preproc_T1w", @@ -2754,7 +2784,8 @@ def register_symmetric_ANTs_anat_to_template(wf, cfg, strat_pool, pipe_num, opt= ]["registration"]["ANTs"]["interpolation"] connect, brain = strat_pool.get_data( - ["desc-preproc_T1w", "space-longitudinal_desc-brain_T1w"], report_fetched=True + ["desc-preproc_T1w", "space-longitudinal_desc-brain_T1w"], + report_fetched=True, ) node, out = connect wf.connect(node, out, ants, "inputspec.input_brain") @@ -2763,7 +2794,11 @@ def register_symmetric_ANTs_anat_to_template(wf, cfg, strat_pool, pipe_num, opt= wf.connect(node, out, ants, "inputspec.reference_brain") node, out = strat_pool.get_data( - ["desc-head_T1w", "desc-preproc_T1w", "space-longitudinal_desc-reorient_T1w"] + [ + "desc-head_T1w", + "desc-preproc_T1w", + "space-longitudinal_desc-reorient_T1w", + ] ) wf.connect(node, out, ants, "inputspec.input_head") @@ -2771,7 +2806,10 @@ def register_symmetric_ANTs_anat_to_template(wf, cfg, strat_pool, pipe_num, opt= wf.connect(node, out, ants, "inputspec.reference_head") node, out = strat_pool.get_data( - ["space-T1w_desc-brain_mask", "space-longitudinal_desc-brain_mask"] + [ + "space-T1w_desc-brain_mask", + "space-longitudinal_desc-brain_mask", + ] ) wf.connect(node, out, ants, "inputspec.input_mask") @@ -2885,7 +2923,10 @@ def register_ANTs_EPI_to_template(wf, cfg, strat_pool, pipe_num, opt=None): inputs=[ ( "desc-restore-brain_T1w", - ["desc-preproc_T1w", "space-longitudinal_desc-brain_T1w"], + [ + "desc-preproc_T1w", + "space-longitudinal_desc-brain_T1w", + ], ["desc-restore_T1w", "desc-preproc_T1w", "desc-reorient_T1w", "T1w"], ["desc-preproc_T1w", "desc-reorient_T1w", "T1w"], "space-T1w_desc-brain_mask", diff --git a/CPAC/resources/cpac_outputs.tsv b/CPAC/resources/cpac_outputs.tsv index 873defbbff..e3a20f38d4 100644 --- a/CPAC/resources/cpac_outputs.tsv +++ b/CPAC/resources/cpac_outputs.tsv @@ -76,6 +76,9 @@ space-bold_label-GM_desc-eroded_mask mask functional func NIfTI space-bold_label-GM_mask mask functional func NIfTI space-bold_label-WM_desc-eroded_mask mask functional func NIfTI space-bold_label-WM_mask mask functional func NIfTI +space-longitudinal_desc-brain_T1w T1w longitudinal T1w anat NIfTI +space-longitudinal_desc-head_T1w T1w longitudinal T1w anat NIfTI +space-longitudinal_desc-brain_mask mask longitudinal T1w anat NIfTI space-longitudinal_desc-brain_mask mask longitudinal T1w anat NIfTI space-longitudinal_label-CSF_desc-preproc_mask mask longitudinal T1w anat NIfTI space-longitudinal_label-CSF_mask mask longitudinal T1w anat NIfTI @@ -160,6 +163,7 @@ space-symtemplate_desc-brain_T1w T1w symmetric template anat NIfTI Yes desc-brain_T1w T1w T1w anat NIfTI Yes desc-head_T1w T1w T1w anat NIfTI desc-preproc_T1w T1w T1w anat NIfTI +space-longitudinal_desc-brain_T1w T1w longitudinal T1w anat NIfTI desc-reorient_T1w T1w T1w anat NIfTI Yes desc-restore_T1w T1w T1w anat NIfTI desc-restore-brain_T1w 
diff --git a/CPAC/resources/cpac_outputs.tsv b/CPAC/resources/cpac_outputs.tsv
index 873defbbff..e3a20f38d4 100644
--- a/CPAC/resources/cpac_outputs.tsv
+++ b/CPAC/resources/cpac_outputs.tsv
@@ -76,6 +76,8 @@ space-bold_label-GM_desc-eroded_mask mask functional func NIfTI
space-bold_label-GM_mask mask functional func NIfTI
space-bold_label-WM_desc-eroded_mask mask functional func NIfTI
space-bold_label-WM_mask mask functional func NIfTI
+space-longitudinal_desc-brain_T1w T1w longitudinal T1w anat NIfTI
+space-longitudinal_desc-head_T1w T1w longitudinal T1w anat NIfTI
space-longitudinal_desc-brain_mask mask longitudinal T1w anat NIfTI
space-longitudinal_label-CSF_desc-preproc_mask mask longitudinal T1w anat NIfTI
space-longitudinal_label-CSF_mask mask longitudinal T1w anat NIfTI
@@ -187,6 +189,7 @@ from-EPItemplate_to-bold_mode-image_desc-nonlinear_xfm xfm func NIfTI
from-longitudinal_to-symtemplate_mode-image_desc-linear_xfm xfm anat NIfTI
from-longitudinal_to-symtemplate_mode-image_desc-nonlinear_xfm xfm anat NIfTI
from-longitudinal_to-symtemplate_mode-image_xfm xfm anat NIfTI
+from-longitudinal_to-T1w_mode-image_desc-linear_xfm xfm anat NIfTI
from-longitudinal_to-template_mode-image_desc-linear_xfm xfm anat NIfTI
from-longitudinal_to-template_mode-image_desc-nonlinear_xfm xfm anat NIfTI
from-longitudinal_to-template_mode-image_xfm xfm anat NIfTI
@@ -197,6 +200,7 @@ from-symtemplate_to-longitudinal_mode-image_xfm xfm anat NIfTI
from-symtemplate_to-T1w_mode-image_desc-linear_xfm xfm anat NIfTI
from-symtemplate_to-T1w_mode-image_desc-nonlinear_xfm xfm anat NIfTI
from-symtemplate_to-T1w_mode-image_xfm xfm anat NIfTI
+from-T1w_to-longitudinal_mode-image_desc-linear_xfm xfm anat NIfTI
from-T1w_to-symtemplate_mode-image_desc-linear_xfm xfm anat NIfTI
from-T1w_to-symtemplate_mode-image_desc-nonlinear_xfm xfm anat NIfTI
from-T1w_to-symtemplate_mode-image_xfm xfm anat NIfTI
diff --git a/CPAC/seg_preproc/seg_preproc.py b/CPAC/seg_preproc/seg_preproc.py
index 1f85c08787..5ce14a5d77 100644
--- a/CPAC/seg_preproc/seg_preproc.py
+++ b/CPAC/seg_preproc/seg_preproc.py
@@ -495,8 +495,14 @@ def create_seg_preproc_antsJointLabel_method(wf_name="seg_preproc_templated_base
option_val="FSL-FAST",
inputs=[
(
- ["desc-brain_T1w", "space-longitudinal_desc-brain_T1w"],
- ["space-T1w_desc-brain_mask", "space-longitudinal_desc-brain_mask"],
+ [
+ "desc-brain_T1w",
+ "space-longitudinal_desc-brain_T1w",
+ ],
+ [
+ "space-T1w_desc-brain_mask",
+ "space-longitudinal_desc-brain_mask",
+ ],
[
"from-template_to-T1w_mode-image_desc-linear_xfm",
"from-template_to-longitudinal_mode-image_desc-linear_xfm",
@@ -556,7 +562,8 @@ def tissue_seg_fsl_fast(wf, cfg, strat_pool, pipe_num, opt=None):
)
connect, resource = strat_pool.get_data(
- ["desc-brain_T1w", "space-longitudinal_desc-brain_T1w"], report_fetched=True
+ ["desc-brain_T1w", "space-longitudinal_desc-brain_T1w"],
+ report_fetched=True,
)
node, out = connect
wf.connect(node, out, segment, "in_files")
@@ -644,7 +651,10 @@ def tissue_seg_fsl_fast(wf, cfg, strat_pool, pipe_num, opt=None):
wf.connect(node, out, process_wm, "inputspec.brain")
node, out = strat_pool.get_data(
- ["space-T1w_desc-brain_mask", "space-longitudinal_desc-brain_mask"]
+ [
+ "space-T1w_desc-brain_mask",
+ "space-longitudinal_desc-brain_mask",
+ ]
)
wf.connect(node, out, process_csf, "inputspec.brain_mask")
wf.connect(node, out, process_gm, "inputspec.brain_mask")
diff --git a/CPAC/utils/utils.py b/CPAC/utils/utils.py
index 8f2810e566..eaf41fa2d8 100644
--- a/CPAC/utils/utils.py
+++ b/CPAC/utils/utils.py
@@ -178,6 +178,9 @@ def create_id_string(
from CPAC.utils.bids_utils import combine_multiple_entity_instances, res_in_filename
+
+ if "longitudinal-template" in resource:
+ resource = resource.replace("longitudinal-template_", "")
if atlas_id:
if "_desc-" in atlas_id:
atlas, desc = atlas_id.split("_desc-")
@@ -187,19 +190,23 @@ def create_id_string(
atlas_id = atlas_id.replace("_desc-", "")
resource = f"atlas-{atlas_id}_{resource}"
- part_id = unique_id.split("_")[0]
- ses_id = unique_id.split("_")[1]
- if "sub-" not in part_id:
- part_id = f"sub-{part_id}"
- if subject_level:
- out_filename = f"{part_id}_{resource}"
- else:
+ id_parts = []
+ if "_" in unique_id:
+ part_id, ses_id = unique_id.split("_", 1)
if "ses-" not in ses_id:
ses_id =
f"ses-{ses_id}" - if scan_id: - out_filename = f"{part_id}_{ses_id}_task-{scan_id}_{resource}" - else: - out_filename = f"{part_id}_{ses_id}_{resource}" + id_parts.append(ses_id) + else: + part_id = unique_id + if "sub-" not in part_id: + part_id = f"sub-{part_id}" + id_parts.insert(0, part_id) + if scan_id: + if "task-" not in scan_id: + scan_id = f"task-{scan_id}" + id_parts.append(scan_id) + id_parts.append(resource) + out_filename = "_".join(id_parts) template_tag = template_desc.split(" -")[0] if template_desc else "*" for prefix in ["space-", "from-", "to-"]: