diff --git a/.circleci/main.yml b/.circleci/main.yml
index ee836c0607..320b817f38 100644
--- a/.circleci/main.yml
+++ b/.circleci/main.yml
@@ -184,8 +184,8 @@ jobs:
key: coverage-docker--{{ .Revision }}
# - restore_cache:
# key: coverage-singularity--{{ .Revision }}
- - restore_cache:
- key: coverage-docker-ABCD-HCP-{{ .Revision }}
+ # - restore_cache:
+ # key: coverage-docker-ABCD-HCP-{{ .Revision }}
# - restore_cache:
# key: coverage-singularity-ABCD-HCP-{{ .Revision }}
- restore_cache:
@@ -280,9 +280,9 @@ workflows:
# - "Test in Singularity"
- "Test lite variant in Docker"
# - "Test lite variant in Singularity"
- - "Test ABCD-HCP variant in Docker"
+ # - "Test ABCD-HCP variant in Docker"
# - "Test ABCD-HCP variant in Singularity"
- - "Test fMRIPrep-LTS variant in Docker"
+ # - "Test fMRIPrep-LTS variant in Docker"
- push-branch-to-docker-hub:
filters:
branches:
@@ -293,18 +293,18 @@ workflows:
variant:
- ""
- lite
- - ABCD-HCP
- - fMRIPrep-LTS
+ # - ABCD-HCP
+ # - fMRIPrep-LTS
requires:
- "Combine coverage"
- pytest-docker:
name: "Test in Docker"
- - pytest-docker:
- name: "Test ABCD-HCP variant in Docker"
- variant: ABCD-HCP
- - pytest-docker:
- name: "Test fMRIPrep-LTS variant in Docker"
- variant: fMRIPrep-LTS
+ # - pytest-docker:
+ # name: "Test ABCD-HCP variant in Docker"
+ # variant: ABCD-HCP
+ # - pytest-docker:
+ # name: "Test fMRIPrep-LTS variant in Docker"
+ # variant: fMRIPrep-LTS
- pytest-docker:
name: "Test lite variant in Docker"
variant: lite
diff --git a/.github/Dockerfiles/C-PAC.develop-ABCD-HCP-bionic.Dockerfile b/.github/Dockerfiles/C-PAC.develop-ABCD-HCP-bionic.Dockerfile
index 2a5760ebb7..27631dacd5 100644
--- a/.github/Dockerfiles/C-PAC.develop-ABCD-HCP-bionic.Dockerfile
+++ b/.github/Dockerfiles/C-PAC.develop-ABCD-HCP-bionic.Dockerfile
@@ -1,4 +1,4 @@
-FROM ghcr.io/fcp-indi/c-pac/stage-base:abcd-hcp-v1.8.7.dev1
+FROM ghcr.io/fcp-indi/c-pac/stage-base:abcd-hcp-v1.8.7.post1.dev3
LABEL org.opencontainers.image.description "Full C-PAC image with software dependencies version-matched to [ABCD-HCP BIDS fMRI Pipeline](https://github.com/DCAN-Labs/abcd-hcp-pipeline/blob/e480a8f99534f1b05f37bf44c64827384b69b383/Dockerfile)"
LABEL org.opencontainers.image.source https://github.com/FCP-INDI/C-PAC
USER root
diff --git a/.github/Dockerfiles/C-PAC.develop-fMRIPrep-LTS-xenial.Dockerfile b/.github/Dockerfiles/C-PAC.develop-fMRIPrep-LTS-xenial.Dockerfile
index 0e9cd3d899..3071f2de78 100644
--- a/.github/Dockerfiles/C-PAC.develop-fMRIPrep-LTS-xenial.Dockerfile
+++ b/.github/Dockerfiles/C-PAC.develop-fMRIPrep-LTS-xenial.Dockerfile
@@ -1,4 +1,4 @@
-FROM ghcr.io/fcp-indi/c-pac/stage-base:fmriprep-lts-v1.8.7.dev1
+FROM ghcr.io/fcp-indi/c-pac/stage-base:fmriprep-lts-v1.8.7.post1.dev3
LABEL org.opencontainers.image.description "Full C-PAC image with software dependencies version-matched to [fMRIPrep LTS](https://reproducibility.stanford.edu/fmriprep-lts#long-term-support-lts)"
LABEL org.opencontainers.image.source https://github.com/FCP-INDI/C-PAC
USER root
diff --git a/.github/Dockerfiles/C-PAC.develop-jammy.Dockerfile b/.github/Dockerfiles/C-PAC.develop-jammy.Dockerfile
index f9ced12d7b..6de7379c1d 100644
--- a/.github/Dockerfiles/C-PAC.develop-jammy.Dockerfile
+++ b/.github/Dockerfiles/C-PAC.develop-jammy.Dockerfile
@@ -14,7 +14,7 @@
# You should have received a copy of the GNU Lesser General Public
# License along with C-PAC. If not, see <https://www.gnu.org/licenses/>.
-FROM ghcr.io/fcp-indi/c-pac/stage-base:standard-v1.8.7.dev1
+FROM ghcr.io/fcp-indi/c-pac/stage-base:standard-v1.8.7.post1.dev3
LABEL org.opencontainers.image.description "Full C-PAC image"
LABEL org.opencontainers.image.source https://github.com/FCP-INDI/C-PAC
USER root
diff --git a/.github/Dockerfiles/C-PAC.develop-lite-jammy.Dockerfile b/.github/Dockerfiles/C-PAC.develop-lite-jammy.Dockerfile
index 1f6f3a9ec9..98d4ed9f8f 100644
--- a/.github/Dockerfiles/C-PAC.develop-lite-jammy.Dockerfile
+++ b/.github/Dockerfiles/C-PAC.develop-lite-jammy.Dockerfile
@@ -14,7 +14,7 @@
# You should have received a copy of the GNU Lesser General Public
# License along with C-PAC. If not, see <https://www.gnu.org/licenses/>.
-FROM ghcr.io/fcp-indi/c-pac/stage-base:lite-v1.8.7.dev1
+FROM ghcr.io/fcp-indi/c-pac/stage-base:lite-v1.8.7.post1.dev3
LABEL org.opencontainers.image.description "Full C-PAC image without FreeSurfer"
LABEL org.opencontainers.image.source https://github.com/FCP-INDI/C-PAC
USER root
diff --git a/.github/Dockerfiles/base-standard.Dockerfile b/.github/Dockerfiles/base-standard.Dockerfile
index 2d0d51ab76..2a448e1186 100644
--- a/.github/Dockerfiles/base-standard.Dockerfile
+++ b/.github/Dockerfiles/base-standard.Dockerfile
@@ -16,7 +16,7 @@
# License along with C-PAC. If not, see <https://www.gnu.org/licenses/>.
FROM ghcr.io/fcp-indi/c-pac/freesurfer:6.0.0-min.neurodocker-jammy as FreeSurfer
-FROM ghcr.io/fcp-indi/c-pac/stage-base:lite-v1.8.7.dev1
+FROM ghcr.io/fcp-indi/c-pac/stage-base:lite-v1.8.7.post1.dev3
LABEL org.opencontainers.image.description "NOT INTENDED FOR USE OTHER THAN AS A STAGE IMAGE IN A MULTI-STAGE BUILD \
Standard software dependencies for C-PAC standard images"
LABEL org.opencontainers.image.source https://github.com/FCP-INDI/C-PAC
diff --git a/.github/scripts/autoversioning.sh b/.github/scripts/autoversioning.sh
index 0543f626a1..f93dc3f57e 100755
--- a/.github/scripts/autoversioning.sh
+++ b/.github/scripts/autoversioning.sh
@@ -18,7 +18,14 @@
# License along with C-PAC. If not, see <https://www.gnu.org/licenses/>.
# Update version comment strings
-cd CPAC
+function wait_for_git_lock() {
+ while [ -f "./.git/index.lock" ]; do
+ echo "Waiting for the git lock file to be removed..."
+ sleep 1
+ done
+}
+
+cd CPAC || exit 1
VERSION=$(python -c "from info import __version__; print(('.'.join(('.'.join(__version__[::-1].split('-')[1].split('.')[1:])[::-1], __version__.split('-')[1])) if '-' in __version__ else __version__).split('+', 1)[0])")
cd ..
echo "v${VERSION}" > version
@@ -30,8 +37,8 @@ else
# Linux and others
find ./CPAC/resources/configs -name "*.yml" -exec sed -i'' -r "${_SED_COMMAND}" {} \;
fi
-git add version
-VERSIONS=( `git show $(git log --pretty=format:'%h' -n 2 version | tail -n 1):version` `cat version` )
+wait_for_git_lock && git add version
+VERSIONS=( `git show $(git log --pretty=format:'%h' -n 1 version | tail -n 1):version` `cat version` )
export PATTERN="(declare|typeset) -a"
if [[ "$(declare -p VERSIONS)" =~ $PATTERN ]]
then
@@ -52,11 +59,12 @@ then
done
unset IFS
fi
-git add CPAC/resources/configs .github/Dockerfiles
+wait_for_git_lock && git add CPAC/resources/configs .github/Dockerfiles
# Overwrite top-level Dockerfiles with the CI Dockerfiles
-cp .github/Dockerfiles/C-PAC.develop-jammy.Dockerfile Dockerfile
-cp .github/Dockerfiles/C-PAC.develop-ABCD-HCP-bionic.Dockerfile variant-ABCD-HCP.Dockerfile
-cp .github/Dockerfiles/C-PAC.develop-fMRIPrep-LTS-xenial.Dockerfile variant-fMRIPrep-LTS.Dockerfile
-cp .github/Dockerfiles/C-PAC.develop-lite-jammy.Dockerfile variant-lite.Dockerfile
-git add *Dockerfile
\ No newline at end of file
+wait_for_git_lock && cp .github/Dockerfiles/C-PAC.develop-jammy.Dockerfile Dockerfile
+wait_for_git_lock && cp .github/Dockerfiles/C-PAC.develop-lite-jammy.Dockerfile variant-lite.Dockerfile
+for DOCKERFILE in $(ls *Dockerfile)
+do
+ wait_for_git_lock && git add $DOCKERFILE
+done
diff --git a/.github/stage_requirements/phase_three.txt b/.github/stage_requirements/phase_three.txt
index b71c08de24..349b6827fe 100644
--- a/.github/stage_requirements/phase_three.txt
+++ b/.github/stage_requirements/phase_three.txt
@@ -1,3 +1 @@
-lite
-ABCD-HCP
-fMRIPrep-LTS
\ No newline at end of file
+lite
\ No newline at end of file
diff --git a/.github/workflows/build_and_test.yml b/.github/workflows/build_and_test.yml
index 62a7a34f4b..44722e426d 100644
--- a/.github/workflows/build_and_test.yml
+++ b/.github/workflows/build_and_test.yml
@@ -317,25 +317,25 @@ jobs:
with:
variant: lite
- C-PAC-ABCD-HCP:
- needs: build-base
- uses: ./.github/workflows/build_C-PAC.yml
- with:
- variant: ABCD-HCP
+ # C-PAC-ABCD-HCP:
+ # needs: build-base
+ # uses: ./.github/workflows/build_C-PAC.yml
+ # with:
+ # variant: ABCD-HCP
- C-PAC-fMRIPrep-LTS:
- needs: build-base
- uses: ./.github/workflows/build_C-PAC.yml
- with:
- variant: fMRIPrep-LTS
+ # C-PAC-fMRIPrep-LTS:
+ # needs: build-base
+ # uses: ./.github/workflows/build_C-PAC.yml
+ # with:
+ # variant: fMRIPrep-LTS
smoke-tests-participant:
name: Run participant-level smoke tests
needs:
- C-PAC
- C-PAC-lite
- - C-PAC-ABCD-HCP
- - C-PAC-fMRIPrep-LTS
+ # - C-PAC-ABCD-HCP
+ # - C-PAC-fMRIPrep-LTS
if: github.ref_name == 'develop' || github.ref_name == 'main'
uses: ./.github/workflows/smoke_test_participant.yml
@@ -358,8 +358,8 @@ jobs:
needs:
- C-PAC
- C-PAC-lite
- - C-PAC-ABCD-HCP
- - C-PAC-fMRIPrep-LTS
+ # - C-PAC-ABCD-HCP
+ # - C-PAC-fMRIPrep-LTS
if: github.ref_type == 'branch'
runs-on: ubuntu-latest
steps:
@@ -425,22 +425,22 @@ jobs:
variant: 'lite'
secrets: inherit
- Deploy_to_Docker_Hub-ABCD-HCP:
- name: Deploy 'ABCD-HCP' to Docker Hub
- needs:
- - C-PAC-ABCD-HCP
- if: github.ref_type == 'tag'
- uses: ./.github/workflows/deploy_to_Docker_Hub.yml
- with:
- variant: 'ABCD-HCP'
- secrets: inherit
+ # Deploy_to_Docker_Hub-ABCD-HCP:
+ # name: Deploy 'ABCD-HCP' to Docker Hub
+ # needs:
+ # - C-PAC-ABCD-HCP
+ # if: github.ref_type == 'tag'
+ # uses: ./.github/workflows/deploy_to_Docker_Hub.yml
+ # with:
+ # variant: 'ABCD-HCP'
+ # secrets: inherit
- Deploy_to_Docker_Hub-fMRIPrep-LTS:
- name: Deploy 'fMRIPrep-LTS' to Docker Hub
- needs:
- - C-PAC-fMRIPrep-LTS
- if: github.ref_type == 'tag'
- uses: ./.github/workflows/deploy_to_Docker_Hub.yml
- with:
- variant: 'fMRIPrep-LTS'
- secrets: inherit
+ # Deploy_to_Docker_Hub-fMRIPrep-LTS:
+ # name: Deploy 'fMRIPrep-LTS' to Docker Hub
+ # needs:
+ # - C-PAC-fMRIPrep-LTS
+ # if: github.ref_type == 'tag'
+ # uses: ./.github/workflows/deploy_to_Docker_Hub.yml
+ # with:
+ # variant: 'fMRIPrep-LTS'
+ # secrets: inherit
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 7357d71417..442a90339f 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -14,7 +14,9 @@
# You should have received a copy of the GNU Lesser General Public
# License along with C-PAC. If not, see <https://www.gnu.org/licenses/>.
-
+ci:
+ skip:
+ - update-yaml-comments
repos:
- repo: local
hooks:
diff --git a/CHANGELOG.md b/CHANGELOG.md
index bd2ad749db..43dfc8e019 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -14,6 +14,17 @@ All notable changes to this project will be documented in this file.
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
+## [1.8.7.post1] - unreleased
+
+### Changed
+
+- Disabled variant image builds.
+
+### Fixed
+
+- A bug in which bandpass filters always assumed 1D regressor files have exactly 5 header rows.
+- Graph-building bugs that prevented longitudinal workflows from running.
+
## [1.8.7] - 2024-05-03
### Added
@@ -294,7 +305,8 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
See [Version 1.8.1 Beta](https://fcp-indi.github.io/docs/user/release_notes/v1.8.1) for release notes for v1.8.1 and [Release Notes](https://fcp-indi.github.io/docs/user/release_notes) for all release notes back to v0.1.1.
-[unreleased]: https://github.com/FCP-INDI/C-PAC/compare/v1.8.6...develop
+[1.8.7.post1]: https://github.com/FCP-INDI/C-PAC/compare/v1.8.7...v1.8.7.post1.dev3
+[1.8.7]: https://github.com/FCP-INDI/C-PAC/releases/tag/v1.8.7
[1.8.6]: https://github.com/FCP-INDI/C-PAC/releases/tag/v1.8.6
[1.8.5]: https://github.com/FCP-INDI/C-PAC/releases/tag/v1.8.5
[1.8.4]: https://github.com/FCP-INDI/C-PAC/releases/tag/v1.8.4
diff --git a/CPAC/info.py b/CPAC/info.py
index f2fcd5481b..4eb7f43700 100644
--- a/CPAC/info.py
+++ b/CPAC/info.py
@@ -43,7 +43,7 @@
_version_major = 1
_version_minor = 8
_version_micro = 7
-_version_extra = 'dev1'
+_version_extra = 'post1.dev3'
def get_cpac_gitversion():
diff --git a/CPAC/longitudinal_pipeline/longitudinal_preproc.py b/CPAC/longitudinal_pipeline/longitudinal_preproc.py
index 44f88675b4..93f2cb99f9 100644
--- a/CPAC/longitudinal_pipeline/longitudinal_preproc.py
+++ b/CPAC/longitudinal_pipeline/longitudinal_preproc.py
@@ -270,7 +270,8 @@ def flirt_node(in_img, output_img, output_mat):
return node_list
-def template_creation_flirt(input_brain_list, input_skull_list, init_reg=None, avg_method='median', dof=12,
+def template_creation_flirt(input_brain_list, input_skull_list, init_reg=None,
+ avg_method='median', dof=12,
interp='trilinear', cost='corratio', mat_type='matrix',
convergence_threshold=-1, thread_pool=2, unique_id_list=None):
"""
diff --git a/CPAC/longitudinal_pipeline/longitudinal_workflow.py b/CPAC/longitudinal_pipeline/longitudinal_workflow.py
index fb12d49ab7..855a8b201f 100644
--- a/CPAC/longitudinal_pipeline/longitudinal_workflow.py
+++ b/CPAC/longitudinal_pipeline/longitudinal_workflow.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-# Copyright (C) 2020-2022 C-PAC Developers
+# Copyright (C) 2020-2024 C-PAC Developers
# This file is part of C-PAC.
@@ -15,14 +15,15 @@
# You should have received a copy of the GNU Lesser General Public
# License along with C-PAC. If not, see <https://www.gnu.org/licenses/>.
-import os
import copy
-import time
+import os
import shutil
+import time
+from typing import cast, Optional
+
from CPAC.pipeline.nodeblock import nodeblock
-from nipype import config
-from nipype import logging
+from nipype import config as nipype_config, logging
from CPAC.pipeline import nipype_pipeline_engine as pe
import nipype.interfaces.afni as afni
import nipype.interfaces.fsl as fsl
@@ -41,7 +42,7 @@
from CPAC.pipeline.cpac_pipeline import initialize_nipype_wf, \
connect_pipeline, build_anat_preproc_stack, build_T1w_registration_stack,\
build_segmentation_stack
-from CPAC.pipeline.engine import initiate_rpool, ingress_output_dir
+from CPAC.pipeline.engine import initiate_rpool, ingress_output_dir, ResourcePool
from CPAC.registration import (
create_fsl_flirt_linear_reg,
@@ -62,6 +63,7 @@
)
from CPAC.utils import find_files, function
+from CPAC.utils.configuration import Configuration
from CPAC.utils.outputs import Outputs
from CPAC.utils.strategy import Strategy
from CPAC.utils.utils import (
@@ -278,7 +280,7 @@ def mask_longitudinal_T1w_brain(wf, cfg, strat_pool, pipe_num, opt=None):
(
"space-longitudinal_desc-brain_T1w",
"from-longitudinal_to-template_mode-image_xfm",
- )
+ ), "T1w-brain-template"
],
outputs=["space-template_desc-brain_T1w"],
)
@@ -311,7 +313,7 @@ def warp_longitudinal_T1w_to_template(wf, cfg, strat_pool, pipe_num,
node, out = strat_pool.get_data("space-longitudinal_desc-brain_T1w")
wf.connect(node, out, apply_xfm, 'inputspec.input_image')
- node, out = strat_pool.get_data("T1w_brain_template")
+ node, out = strat_pool.get_data("T1w-brain-template")
wf.connect(node, out, apply_xfm, 'inputspec.reference')
node, out = \
@@ -332,7 +334,9 @@ def warp_longitudinal_T1w_to_template(wf, cfg, strat_pool, pipe_num,
switch=["run"],
inputs=[
(
- "from-longitudinal_to-T1w_mode-image_desc-linear_xfm",
+ "space-longitudinal_desc-brain_T1w",
+ ["from-longitudinal_to-T1w_mode-image_desc-linear_xfm",
+ "from-T1w_to-longitudinal_mode-image_desc-linear_xfm"],
"space-longitudinal_label-CSF_mask",
"space-longitudinal_label-GM_mask",
"space-longitudinal_label-WM_mask",
@@ -342,7 +346,8 @@ def warp_longitudinal_T1w_to_template(wf, cfg, strat_pool, pipe_num,
"space-longitudinal_label-CSF_probseg",
"space-longitudinal_label-GM_probseg",
"space-longitudinal_label-WM_probseg",
- )
+ ),
+ "T1w-brain-template"
],
outputs=[
"label-CSF_mask",
@@ -356,11 +361,23 @@ def warp_longitudinal_T1w_to_template(wf, cfg, strat_pool, pipe_num,
"label-WM_probseg",
],
)
-def warp_longitudinal_seg_to_T1w(wf, cfg, strat_pool, pipe_num, opt=None):
+def warp_longitudinal_seg_to_T1w(wf, cfg, strat_pool: ResourcePool, pipe_num, opt=None):
- xfm_prov = strat_pool.get_cpac_provenance(
- 'from-longitudinal_to-T1w_mode-image_desc-linear_xfm')
- reg_tool = check_prov_for_regtool(xfm_prov)
+ if strat_pool.check_rpool("from-longitudinal_to-T1w_mode-image_desc-linear_xfm"):
+ xfm_prov = strat_pool.get_cpac_provenance(
+ "from-longitudinal_to-T1w_mode-image_desc-linear_xfm")
+ reg_tool = check_prov_for_regtool(xfm_prov)
+ xfm: tuple[pe.Node, str] = strat_pool.get_data("from-longitudinal_to-T1w_mode-image_desc-linear_xfm")
+ else:
+ xfm_prov = strat_pool.get_cpac_provenance(
+ "from-T1w_to-longitudinal_mode-image_desc-linear_xfm")
+ reg_tool = check_prov_for_regtool(xfm_prov)
+ # create inverse xfm if we don't have it
+ invt = pe.Node(interface=fsl.ConvertXFM(), name='convert_xfm')
+ invt.inputs.invert_xfm = True
+ wf.connect(
+ *strat_pool.get_data("from-T1w_to-longitudinal_mode-image_desc-linear_xfm"), invt, "in_file")
+ xfm = (invt, "out_file")
num_cpus = cfg.pipeline_setup['system_config'][
'max_cores_per_participant']
@@ -391,51 +408,58 @@ def warp_longitudinal_seg_to_T1w(wf, cfg, strat_pool, pipe_num, opt=None):
'registration']['FSL-FNIRT']['interpolation']
node, out = strat_pool.get_data("space-longitudinal_desc-brain_T1w")
+
wf.connect(node, out, apply_xfm, 'inputspec.input_image')
- node, out = strat_pool.get_data("T1w_brain_template")
+ node, out = strat_pool.get_data("T1w-brain-template")
wf.connect(node, out, apply_xfm, 'inputspec.reference')
-
- node, out = \
- strat_pool.get_data("from-longitudinal_to-template_mode-image_xfm")
- wf.connect(node, out, apply_xfm, 'inputspec.transform')
+
+ wf.connect(*xfm, apply_xfm, 'inputspec.transform')
outputs[f'label-{label}'] = (apply_xfm, 'outputspec.output_image')
return (wf, outputs)
-def anat_longitudinal_wf(subject_id, sub_list, config):
+def anat_longitudinal_wf(subject_id: str, sub_list: list[dict], config: Configuration,
+ dry_run: bool = False) -> None:
"""
+ Build and run an anatomical longitudinal workflow.
+
Parameters
----------
- subject_id : str
+ subject_id
the id of the subject
- sub_list : list of dict
+ sub_list
this is a list of sessions for one subject and each session if the same dictionary as the one given to
prep_workflow
- config : configuration
+ config
a configuration object containing the information of the pipeline config. (Same as for prep_workflow)
-
- Returns
- -------
- None
+ dry_run
+ build graph without running?
"""
-
+ nipype_config.update_config({
+ 'execution': {
+ 'crashfile_format': 'txt',
+ 'stop_on_first_crash': config['pipeline_setup', 'system_config',
+ 'fail_fast']}})
+ config["subject_id"] = subject_id
# list of lists for every strategy
session_id_list = []
session_wfs = {}
cpac_dirs = []
- out_dir = config.pipeline_setup['output_directory']['path']
+ out_dir: str = config.pipeline_setup['output_directory']['path']
- orig_pipe_name = config.pipeline_setup['pipeline_name']
+ orig_pipe_name: str = config.pipeline_setup['pipeline_name']
# Loop over the sessions to create the input for the longitudinal
# algorithm
- for session in sub_list:
+ strats_dct: dict[str, list[tuple[pe.Node, str] | str]] = {"desc-brain_T1w": [],
+ "desc-head_T1w": []}
+ for i, session in enumerate(sub_list):
- unique_id = session['unique_id']
+ unique_id: str = session['unique_id']
session_id_list.append(unique_id)
try:
@@ -454,10 +478,11 @@ def anat_longitudinal_wf(subject_id, sub_list, config):
except KeyError:
input_creds_path = None
- workflow = initialize_nipype_wf(config, sub_list[0],
- # just grab the first one for the name
- name="anat_longitudinal_pre-preproc")
-
+ workflow: pe.Workflow = initialize_nipype_wf(
+ config,
+ sub_list[i],
+ name="anat_longitudinal_pre-preproc")
+ rpool: ResourcePool
workflow, rpool = initiate_rpool(workflow, config, session)
pipeline_blocks = build_anat_preproc_stack(rpool, config)
workflow = connect_pipeline(workflow, config, rpool, pipeline_blocks)
@@ -465,167 +490,155 @@ def anat_longitudinal_wf(subject_id, sub_list, config):
session_wfs[unique_id] = rpool
rpool.gather_pipes(workflow, config)
+ for key in strats_dct.keys():
+ strats_dct[key].append(cast(tuple[pe.Node, str], rpool.get_data(key)))
+ if not dry_run:
+ workflow.run()
+ for key in strats_dct.keys(): # get the outputs from run-nodes
+ for index, data in enumerate(list(strats_dct[key])):
+ if isinstance(data, tuple):
+ strats_dct[key][index] = workflow.get_output_path(*data)
+
+ wf = initialize_nipype_wf(config, sub_list[0],
+ # just grab the first one for the name
+ name=f"template_node_brain")
+
+ config.pipeline_setup[
+ 'pipeline_name'] = f'longitudinal_{orig_pipe_name}'
+
+ template_node_name = 'longitudinal_anat_template_brain'
+
+ # This node will generate the longitudinal template (the functions are
+ # in longitudinal_preproc)
+ # Later other algorithms could be added to calculate it, like the
+ # multivariate template from ANTS
+ # It would just require to change it here.
+ template_node = subject_specific_template(
+ workflow_name=template_node_name
+ )
- workflow.run()
-
- cpac_dir = os.path.join(out_dir, f'pipeline_{orig_pipe_name}',
- f'{subject_id}_{unique_id}')
- cpac_dirs.append(os.path.join(cpac_dir, 'anat'))
-
- # Now we have all the anat_preproc set up for every session
- # loop over the different anat preproc strategies
- strats_brain_dct = {}
- strats_head_dct = {}
- for cpac_dir in cpac_dirs:
- if os.path.isdir(cpac_dir):
- for filename in os.listdir(cpac_dir):
- if 'T1w.nii' in filename:
- for tag in filename.split('_'):
- if 'desc-' in tag and 'brain' in tag:
- if tag not in strats_brain_dct:
- strats_brain_dct[tag] = []
- strats_brain_dct[tag].append(os.path.join(cpac_dir,
- filename))
- if tag not in strats_head_dct:
- strats_head_dct[tag] = []
- head_file = filename.replace(tag, 'desc-reorient')
- strats_head_dct[tag].append(os.path.join(cpac_dir,
- head_file))
-
- for strat in strats_brain_dct.keys():
-
- wf = initialize_nipype_wf(config, sub_list[0],
- # just grab the first one for the name
- name=f"template_node_{strat}")
-
- config.pipeline_setup[
- 'pipeline_name'] = f'longitudinal_{orig_pipe_name}'
-
- template_node_name = f'longitudinal_anat_template_{strat}'
-
- # This node will generate the longitudinal template (the functions are
- # in longitudinal_preproc)
- # Later other algorithms could be added to calculate it, like the
- # multivariate template from ANTS
- # It would just require to change it here.
- template_node = subject_specific_template(
- workflow_name=template_node_name
- )
-
- template_node.inputs.set(
- avg_method=config.longitudinal_template_generation[
- 'average_method'],
- dof=config.longitudinal_template_generation['dof'],
- interp=config.longitudinal_template_generation['interp'],
- cost=config.longitudinal_template_generation['cost'],
- convergence_threshold=config.longitudinal_template_generation[
- 'convergence_threshold'],
- thread_pool=config.longitudinal_template_generation[
- 'thread_pool'],
- unique_id_list=list(session_wfs.keys())
- )
+ template_node.inputs.set(
+ avg_method=config.longitudinal_template_generation[
+ 'average_method'],
+ dof=config.longitudinal_template_generation['dof'],
+ interp=config.longitudinal_template_generation['interp'],
+ cost=config.longitudinal_template_generation['cost'],
+ convergence_threshold=config.longitudinal_template_generation[
+ 'convergence_threshold'],
+ thread_pool=config.longitudinal_template_generation[
+ 'thread_pool'],
+ unique_id_list=list(session_wfs.keys())
+ )
- template_node.inputs.input_brain_list = strats_brain_dct[strat]
- template_node.inputs.input_skull_list = strats_head_dct[strat]
+ num_sessions = len(strats_dct["desc-brain_T1w"])
+ merge_brains = pe.Node(Merge(num_sessions), name="merge_brains")
+ merge_skulls = pe.Node(Merge(num_sessions), name="merge_skulls")
- long_id = f'longitudinal_{subject_id}_strat-{strat}'
+ for i in list(range(0, num_sessions)):
+ _connect_node_or_path(wf, merge_brains, strats_dct, "desc-brain_T1w", i)
+ _connect_node_or_path(wf, merge_skulls, strats_dct, "desc-head_T1w", i)
+ wf.connect(merge_brains, "out", template_node, "input_brain_list")
+ wf.connect(merge_skulls, "out", template_node, "input_skull_list")
- wf, rpool = initiate_rpool(wf, config, part_id=long_id)
+ long_id = f'longitudinal_{subject_id}_strat-desc-brain_T1w'
- rpool.set_data("space-longitudinal_desc-brain_T1w",
- template_node, 'brain_template', {},
- "", template_node_name)
+ wf, rpool = initiate_rpool(wf, config, part_id=long_id)
+
+ rpool.set_data("space-longitudinal_desc-brain_T1w",
+ template_node, 'brain_template', {},
+ "", template_node_name)
- rpool.set_data("space-longitudinal_desc-brain_T1w-template",
- template_node, 'brain_template', {},
- "", template_node_name)
+ rpool.set_data("space-longitudinal_desc-brain_T1w-template",
+ template_node, 'brain_template', {},
+ "", template_node_name)
- rpool.set_data("space-longitudinal_desc-reorient_T1w",
- template_node, 'skull_template', {},
- "", template_node_name)
+ rpool.set_data("space-longitudinal_desc-reorient_T1w",
+ template_node, 'skull_template', {},
+ "", template_node_name)
- rpool.set_data("space-longitudinal_desc-reorient_T1w-template",
- template_node, 'skull_template', {},
- "", template_node_name)
+ rpool.set_data("space-longitudinal_desc-reorient_T1w-template",
+ template_node, 'skull_template', {},
+ "", template_node_name)
- pipeline_blocks = [mask_longitudinal_T1w_brain]
+ pipeline_blocks = [mask_longitudinal_T1w_brain]
- pipeline_blocks = build_T1w_registration_stack(rpool, config,
- pipeline_blocks)
+ pipeline_blocks = build_T1w_registration_stack(rpool, config,
+ pipeline_blocks, space="longitudinal")
- pipeline_blocks = build_segmentation_stack(rpool, config,
- pipeline_blocks)
+ pipeline_blocks = build_segmentation_stack(rpool, config,
+ pipeline_blocks)
- wf = connect_pipeline(wf, config, rpool, pipeline_blocks)
+ wf = connect_pipeline(wf, config, rpool, pipeline_blocks)
- excl = ['space-longitudinal_desc-brain_T1w',
- 'space-longitudinal_desc-reorient_T1w',
- 'space-longitudinal_desc-brain_mask']
- rpool.gather_pipes(wf, config, add_excl=excl)
+ excl = ['space-longitudinal_desc-brain_T1w',
+ 'space-longitudinal_desc-reorient_T1w',
+ 'space-longitudinal_desc-brain_mask']
+ rpool.gather_pipes(wf, config, add_excl=excl)
- # this is going to run multiple times!
- # once for every strategy!
+ if not dry_run:
wf.run()
- # now, just write out a copy of the above to each session
- config.pipeline_setup['pipeline_name'] = orig_pipe_name
- for session in sub_list:
+ # now, just write out a copy of the above to each session
+ config.pipeline_setup['pipeline_name'] = orig_pipe_name
+ for session in sub_list:
- unique_id = session['unique_id']
+ unique_id = session['unique_id']
- try:
- creds_path = session['creds_path']
- if creds_path and 'none' not in creds_path.lower():
- if os.path.exists(creds_path):
- input_creds_path = os.path.abspath(creds_path)
- else:
- err_msg = 'Credentials path: "%s" for subject "%s" ' \
- 'session "%s" was not found. Check this path ' \
- 'and try again.' % (creds_path, subject_id,
- unique_id)
- raise Exception(err_msg)
+ try:
+ creds_path = session['creds_path']
+ if creds_path and 'none' not in creds_path.lower():
+ if os.path.exists(creds_path):
+ input_creds_path = os.path.abspath(creds_path)
else:
- input_creds_path = None
- except KeyError:
+ err_msg = 'Credentials path: "%s" for subject "%s" ' \
+ 'session "%s" was not found. Check this path ' \
+ 'and try again.' % (creds_path, subject_id,
+ unique_id)
+ raise Exception(err_msg)
+ else:
input_creds_path = None
+ except KeyError:
+ input_creds_path = None
- wf = initialize_nipype_wf(config, sub_list[0])
-
- wf, rpool = initiate_rpool(wf, config, session)
-
- config.pipeline_setup[
- 'pipeline_name'] = f'longitudinal_{orig_pipe_name}'
- rpool = ingress_output_dir(config, rpool, long_id,
- creds_path=input_creds_path)
+ wf = initialize_nipype_wf(config, sub_list[0])
- select_node_name = f'select_{unique_id}'
- select_sess = pe.Node(Function(input_names=['session',
- 'output_brains',
- 'warps'],
- output_names=['brain_path',
- 'warp_path'],
- function=select_session),
- name=select_node_name)
- select_sess.inputs.session = unique_id
+ wf, rpool = initiate_rpool(wf, config, session, rpool=rpool)
- wf.connect(template_node, 'output_brain_list', select_sess,
- 'output_brains')
- wf.connect(template_node, 'warp_list', select_sess, 'warps')
+ config.pipeline_setup[
+ 'pipeline_name'] = f'longitudinal_{orig_pipe_name}'
+ if "derivatives_dir" in session:
+ rpool = ingress_output_dir(
+ wf, config, rpool, long_id, data_paths=session, part_id=subject_id,
+ ses_id=unique_id, creds_path=input_creds_path)
+
+ select_node_name = f'FSL_select_{unique_id}'
+ select_sess = pe.Node(Function(input_names=['session',
+ 'output_brains',
+ 'warps'],
+ output_names=['brain_path', 'warp_path'],
+ function=select_session),
+ name=select_node_name)
+ select_sess.inputs.session = unique_id
+
+ wf.connect(template_node, 'output_brain_list', select_sess,
+ 'output_brains')
+ wf.connect(template_node, 'warp_list', select_sess, 'warps')
- rpool.set_data("space-longitudinal_desc-brain_T1w",
- select_sess, 'brain_path', {}, "",
- select_node_name)
+ rpool.set_data("space-longitudinal_desc-brain_T1w",
+ select_sess, 'brain_path', {}, "",
+ select_node_name)
- rpool.set_data("from-T1w_to-longitudinal_mode-image_"
- "desc-linear_xfm",
- select_sess, 'warp_path', {}, "",
- select_node_name)
+ rpool.set_data("from-T1w_to-longitudinal_mode-image_"
+ "desc-linear_xfm",
+ select_sess, 'warp_path', {}, "",
+ select_node_name)
- config.pipeline_setup['pipeline_name'] = orig_pipe_name
- excl = ['space-template_desc-brain_T1w',
- 'space-T1w_desc-brain_mask']
+ config.pipeline_setup['pipeline_name'] = orig_pipe_name
+ excl = ['space-template_desc-brain_T1w',
+ 'space-T1w_desc-brain_mask']
- rpool.gather_pipes(wf, config, add_excl=excl)
+ rpool.gather_pipes(wf, config, add_excl=excl)
+ if not dry_run:
wf.run()
# begin single-session stuff again
@@ -651,7 +664,7 @@ def anat_longitudinal_wf(subject_id, sub_list, config):
wf = initialize_nipype_wf(config, sub_list[0])
- wf, rpool = initiate_rpool(wf, config, session)
+ wf, rpool = initiate_rpool(wf, config, session, rpool=rpool)
pipeline_blocks = [warp_longitudinal_T1w_to_template,
warp_longitudinal_seg_to_T1w]
@@ -659,12 +672,10 @@ def anat_longitudinal_wf(subject_id, sub_list, config):
wf = connect_pipeline(wf, config, rpool, pipeline_blocks)
rpool.gather_pipes(wf, config)
-
# this is going to run multiple times!
# once for every strategy!
- wf.run()
-
-
+ if not dry_run:
+ wf.run()
# TODO check:
@@ -1175,7 +1186,6 @@ def func_longitudinal_template_wf(subject_id, strat_list, config):
workflow.connect(merge_func_preproc_node, 'brain_list',
template_node, 'input_brain_list')
-
workflow.connect(merge_func_preproc_node, 'skull_list',
template_node, 'input_skull_list')
@@ -1190,3 +1200,11 @@ def func_longitudinal_template_wf(subject_id, strat_list, config):
workflow.run()
return
+
+def _connect_node_or_path(wf: pe.Workflow, node: pe.Node, strats_dct: dict[str, list[tuple[pe.Node, str] | str]], key: str, index: int) -> None:
+ """Set input appropriately for either a Node or a path string."""
+ input: str = f"in{index + 1}"
+ if isinstance(strats_dct[key][index], str):
+ setattr(node.inputs, input, strats_dct[key][index])
+ else:
+ wf.connect(*strats_dct[key][index], node, input)
diff --git a/CPAC/nuisance/bandpass.py b/CPAC/nuisance/bandpass.py
index 91c72124c8..b5d2148fa8 100644
--- a/CPAC/nuisance/bandpass.py
+++ b/CPAC/nuisance/bandpass.py
@@ -110,16 +110,16 @@ def bandpass_voxels(realigned_file, regressor_file, bandpass_freqs,
img.to_filename(regressor_bandpassed_file)
else:
- with open(regressor_file, 'r') as f:
- header = []
-
- # header wouldn't be longer than 5, right? I don't want to
- # loop over the whole file
- for i in range(5):
- line = f.readline()
- if line.startswith('#') or isinstance(line[0], str):
+ header = []
+ with open(regressor_file, "r") as _f:
+ # Each leading line that doesn't start with a number goes into the header
+ for line in _f.readlines():
+ try:
+ float(line.split()[0])
+ break
+ except ValueError:
header.append(line)
-
+
# usecols=[list]
regressor = np.loadtxt(regressor_file, skiprows=len(header))
Yc = regressor - np.tile(regressor.mean(0), (regressor.shape[0], 1))
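
The hunk above replaces the hard-coded assumption of five '#'-prefixed header rows with a scan that treats every leading line whose first field cannot be parsed as a number as header. A minimal standalone sketch of that logic (the helper name is illustrative, not part of C-PAC):

def count_header_rows(regressor_file):
    """Count leading non-numeric lines in a 1D regressor file."""
    header = []
    with open(regressor_file, "r") as _f:
        for line in _f:
            try:
                float(line.split()[0])  # first numeric row ends the header
                break
            except ValueError:
                header.append(line)
    return len(header)

# e.g. np.loadtxt(regressor_file, skiprows=count_header_rows(regressor_file))
# now works whether the regressor file has 0, 1, or 12 header rows.
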
diff --git a/CPAC/pipeline/cpac_pipeline.py b/CPAC/pipeline/cpac_pipeline.py
index 471ead78d8..a9df746179 100644
--- a/CPAC/pipeline/cpac_pipeline.py
+++ b/CPAC/pipeline/cpac_pipeline.py
@@ -17,6 +17,7 @@
import os
import sys
import time
+from typing import Literal
import csv
import shutil
import pickle
@@ -97,8 +98,7 @@
coregistration,
create_func_to_T1template_xfm,
create_func_to_T1template_symmetric_xfm,
- warp_wholeheadT1_to_template,
- warp_T1mask_to_template,
+ warp_to_template,
apply_phasediff_to_timeseries_separately,
apply_blip_to_timeseries_separately,
warp_timeseries_to_T1template,
@@ -1033,25 +1033,26 @@ def build_anat_preproc_stack(rpool, cfg, pipeline_blocks=None):
return pipeline_blocks
-def build_T1w_registration_stack(rpool, cfg, pipeline_blocks=None):
+def build_T1w_registration_stack(rpool, cfg, pipeline_blocks=None,
+ space: Literal["longitudinal", "T1w"] = "T1w"):
if not pipeline_blocks:
pipeline_blocks = []
reg_blocks = []
- if not rpool.check_rpool('from-T1w_to-template_mode-image_xfm'):
+ if not rpool.check_rpool(f'from-{space}_to-template_mode-image_xfm'):
reg_blocks = [
[register_ANTs_anat_to_template, register_FSL_anat_to_template],
overwrite_transform_anat_to_template,
- warp_wholeheadT1_to_template,
- warp_T1mask_to_template
+ warp_to_template("wholehead", space),
+ warp_to_template("mask", space)
]
if not rpool.check_rpool('desc-restore-brain_T1w'):
reg_blocks.append(correct_restore_brain_intensity_abcd)
if cfg.voxel_mirrored_homotopic_connectivity['run']:
- if not rpool.check_rpool('from-T1w_to-symtemplate_mode-image_xfm'):
+ if not rpool.check_rpool(f'from-{space}_to-symtemplate_mode-image_xfm'):
reg_blocks.append([register_symmetric_ANTs_anat_to_template,
register_symmetric_FSL_anat_to_template])
pipeline_blocks += reg_blocks
diff --git a/CPAC/pipeline/cpac_runner.py b/CPAC/pipeline/cpac_runner.py
index 30d6b0fc18..58bbd5d578 100644
--- a/CPAC/pipeline/cpac_runner.py
+++ b/CPAC/pipeline/cpac_runner.py
@@ -191,7 +191,7 @@ def run_cpac_on_cluster(config_file, subject_list_file,
f.write(pid)
-def run_T1w_longitudinal(sublist, cfg):
+def run_T1w_longitudinal(sublist, cfg, dry_run: bool = False):
subject_id_dict = {}
for sub in sublist:
@@ -206,7 +206,7 @@ def run_T1w_longitudinal(sublist, cfg):
for subject_id, sub_list in subject_id_dict.items():
if len(sub_list) > 1:
valid_longitudinal_data = True
- anat_longitudinal_wf(subject_id, sub_list, cfg)
+ anat_longitudinal_wf(subject_id, sub_list, cfg, dry_run=dry_run)
elif len(sub_list) == 1:
warnings.warn("\n\nThere is only one anatomical session "
"for sub-%s. Longitudinal preprocessing "
@@ -422,7 +422,7 @@ def run(subject_list_file, config_file=None, p_name=None, plugin=None,
if hasattr(c, 'longitudinal_template_generation') and \
c.longitudinal_template_generation['run']:
- run_T1w_longitudinal(sublist, c)
+ run_T1w_longitudinal(sublist, c, dry_run=test_config)
# TODO functional longitudinal pipeline
'''
diff --git a/CPAC/pipeline/engine.py b/CPAC/pipeline/engine.py
index 09be47823a..1f697e0667 100644
--- a/CPAC/pipeline/engine.py
+++ b/CPAC/pipeline/engine.py
@@ -1,4 +1,4 @@
-# Copyright (C) 2021-2023 C-PAC Developers
+# Copyright (C) 2021-2024 C-PAC Developers
# This file is part of C-PAC.
@@ -21,6 +21,8 @@
from itertools import chain
import logging
import os
+from pathlib import Path
+import pickle
import re
from typing import Any, Optional, Union
import warnings
@@ -954,7 +956,7 @@ def post_process(self, wf, label, connection, json_info, pipe_idx, pipe_x,
def gather_pipes(self, wf, cfg, all=False, add_incl=None, add_excl=None):
excl = []
substring_excl = []
- outputs_logger = getLogger(f'{cfg["subject_id"]}_expectedOutputs')
+ outputs_logger = getLogger(f'{cfg.get("subject_id", getattr(wf, "name", ""))}_expectedOutputs')
expected_outputs = ExpectedOutputs()
if add_excl:
@@ -1094,7 +1096,10 @@ def gather_pipes(self, wf, cfg, all=False, add_incl=None, add_excl=None):
unlabelled.remove(key)
# del all_forks
for pipe_idx in self.rpool[resource]:
- pipe_x = self.get_pipe_number(pipe_idx)
+ try:
+ pipe_x = self.get_pipe_number(pipe_idx)
+ except ValueError:
+ continue
json_info = self.rpool[resource][pipe_idx]['json']
out_dct = self.rpool[resource][pipe_idx]['out']
@@ -2335,7 +2340,9 @@ def _set_nested(attr, keys):
return rpool
-def initiate_rpool(wf, cfg, data_paths=None, part_id=None):
+def initiate_rpool(
+ wf, cfg, data_paths=None, part_id=None, *, rpool: Optional[ResourcePool] = None
+):
'''
data_paths format:
@@ -2374,7 +2381,7 @@ def initiate_rpool(wf, cfg, data_paths=None, part_id=None):
unique_id = part_id
creds_path = None
- rpool = ResourcePool(name=unique_id, cfg=cfg)
+ rpool = ResourcePool(rpool=rpool.rpool if rpool else None, name=unique_id, cfg=cfg)
if data_paths:
# ingress outdir
diff --git a/CPAC/pipeline/nipype_pipeline_engine/engine.py b/CPAC/pipeline/nipype_pipeline_engine/engine.py
index 31092e5269..b36bc6db67 100644
--- a/CPAC/pipeline/nipype_pipeline_engine/engine.py
+++ b/CPAC/pipeline/nipype_pipeline_engine/engine.py
@@ -51,8 +51,9 @@
import re
from copy import deepcopy
from inspect import Parameter, Signature, signature
-from typing import ClassVar, Optional, Union
+from typing import ClassVar, Optional, TYPE_CHECKING, Union
from nibabel import load
+from nipype.interfaces.base.support import InterfaceResult
from nipype.interfaces.utility import Function
from nipype.pipeline import engine as pe
from nipype.pipeline.engine.utils import (
@@ -71,6 +72,8 @@
from traits.trait_handlers import TraitListObject
from CPAC.utils.monitoring.custom_logging import getLogger
from CPAC.utils.typing import DICT
+if TYPE_CHECKING:
+ from CPAC.pipeline.engine import ResourcePool
# set global default mem_gb
DEFAULT_MEM_GB = 2.0
@@ -496,6 +499,26 @@ def __init__(self, name, base_dir=None, debug=False):
self._nodes_cache = set()
self._nested_workflows_cache = set()
+ def copy_input_connections(self, node1: pe.Node, node2: pe.Node) -> None:
+ """Copy input connections from ``node1`` to ``node2``."""
+ new_connections: list[tuple[pe.Node, str, pe.Node, str]] = []
+ for connection in self._graph.edges:
+ _out: pe.Node
+ _in: pe.Node
+ _out, _in = connection
+ if _in == node1:
+ details = self._graph.get_edge_data(*connection)
+ if "connect" in details:
+ for connect in details["connect"]:
+ new_connections.append((_out, connect[0], node2, connect[1]))
+ for connection in new_connections:
+ try:
+ self.connect(*connection)
+ except Exception:
+ # connection already exists
+ continue
+
+
def _configure_exec_nodes(self, graph):
"""Ensure that each node knows where to get inputs from"""
for node in graph.nodes():
@@ -644,6 +667,16 @@ def _get_dot(
logger.debug("cross connection: %s", dotlist[-1])
return ("\n" + prefix).join(dotlist)
+ def get_output_path(self, node: pe.Node, out: str) -> str:
+ """Get an output path from an already-run Node."""
+ try:
+ _run_node: pe.Node = [_ for _ in self.run(updatehash=True).nodes if _.fullname == node.fullname][0]
+ except IndexError as index_error:
+ msg = f"Could not find {node.fullname} in {self}'s run Nodes."
+ raise LookupError(msg) from index_error
+ _res: InterfaceResult = _run_node.run()
+ return getattr(_res.outputs, out)
+
def _handle_just_in_time_exception(self, node):
# pylint: disable=protected-access
if hasattr(self, '_local_func_scans'):
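
The new `get_output_path` method re-runs the workflow with `updatehash=True` (so cached results are reused), locates the already-run node by its fullname, and returns the requested output attribute. This is what `anat_longitudinal_wf` uses to turn `(node, out)` tuples into concrete file paths after the session-level workflows have run; a hedged usage sketch mirroring that loop:

# after `workflow.run()` has executed the session-level graph:
node, out = strats_dct["desc-brain_T1w"][0]       # a (Node, output-name) pair from the rpool
brain_path = workflow.get_output_path(node, out)  # filesystem path produced by that node
strats_dct["desc-brain_T1w"][0] = brain_path      # later stages can consume a plain path
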
diff --git a/CPAC/registration/registration.py b/CPAC/registration/registration.py
index 8658aae219..a73ff840b2 100644
--- a/CPAC/registration/registration.py
+++ b/CPAC/registration/registration.py
@@ -1,4 +1,4 @@
-# Copyright (C) 2012-2023 C-PAC Developers
+# Copyright (C) 2012-2024 C-PAC Developers
# This file is part of C-PAC.
@@ -15,9 +15,10 @@
# You should have received a copy of the GNU Lesser General Public
# License along with C-PAC. If not, see <https://www.gnu.org/licenses/>.
# pylint: disable=too-many-lines,ungrouped-imports,wrong-import-order
-from typing import Optional
+from copy import deepcopy
+from typing import Literal, Optional
from CPAC.pipeline import nipype_pipeline_engine as pe
-from CPAC.pipeline.nodeblock import nodeblock
+from CPAC.pipeline.nodeblock import nodeblock, NodeBlockFunction
from nipype.interfaces import afni, ants, c3, fsl, utility as util
from nipype.interfaces.afni import utils as afni_utils
@@ -32,7 +33,8 @@
hardcoded_reg, \
one_d_to_mat, \
run_c3d, \
- run_c4d
+ run_c4d, \
+ prepend_space
from CPAC.utils.interfaces.fsl import Merge as fslMerge
from CPAC.utils.typing import LIST_OR_STR, TUPLE
from CPAC.utils.utils import check_prov_for_motion_tool, check_prov_for_regtool
@@ -2365,13 +2367,12 @@ def register_ANTs_anat_to_template(wf, cfg, strat_pool, pipe_num, opt=None):
wf.connect(node, out, ants_rc, 'inputspec.lesion_mask')
if 'space-longitudinal' in brain:
- for key in outputs:
+ for key in list(outputs.keys()):
for direction in ['from', 'to']:
if f'{direction}-T1w' in key:
new_key = key.replace(f'{direction}-T1w',
- f'{direction}-longitudinal')
+ f'{direction}-longitudinal')
outputs[new_key] = outputs[key]
- del outputs[key]
return (wf, outputs)
@@ -3512,115 +3513,94 @@ def apply_blip_to_timeseries_separately(wf, cfg, strat_pool, pipe_num,
return (wf, outputs)
-@nodeblock(
- name="transform_whole_head_T1w_to_T1template",
- config=["registration_workflows", "anatomical_registration"],
- switch=["run"],
- inputs=[
- (
- "desc-head_T1w",
- "from-T1w_to-template_mode-image_xfm",
- "space-template_desc-head_T1w",
- ),
- "T1w-template",
- ],
- outputs={"space-template_desc-head_T1w": {"Template": "T1w-template"}},
-)
-def warp_wholeheadT1_to_template(wf, cfg, strat_pool, pipe_num, opt=None):
-
- xfm_prov = strat_pool.get_cpac_provenance(
- 'from-T1w_to-template_mode-image_xfm')
- reg_tool = check_prov_for_regtool(xfm_prov)
-
- num_cpus = cfg.pipeline_setup['system_config'][
- 'max_cores_per_participant']
-
- num_ants_cores = cfg.pipeline_setup['system_config']['num_ants_threads']
-
- apply_xfm = apply_transform(f'warp_wholehead_T1w_to_T1template_{pipe_num}',
- reg_tool, time_series=False, num_cpus=num_cpus,
- num_ants_cores=num_ants_cores)
-
- if reg_tool == 'ants':
- apply_xfm.inputs.inputspec.interpolation = cfg.registration_workflows[
- 'functional_registration']['func_registration_to_template'][
- 'ANTs_pipelines']['interpolation']
- elif reg_tool == 'fsl':
- apply_xfm.inputs.inputspec.interpolation = cfg.registration_workflows[
- 'functional_registration']['func_registration_to_template'][
- 'FNIRT_pipelines']['interpolation']
-
- connect = strat_pool.get_data("desc-head_T1w")
- node, out = connect
- wf.connect(node, out, apply_xfm, 'inputspec.input_image')
-
- node, out = strat_pool.get_data("T1w-template")
- wf.connect(node, out, apply_xfm, 'inputspec.reference')
-
- node, out = strat_pool.get_data("from-T1w_to-template_mode-image_xfm")
- wf.connect(node, out, apply_xfm, 'inputspec.transform')
-
- outputs = {
- 'space-template_desc-head_T1w': (apply_xfm, 'outputspec.output_image')
- }
-
- return (wf, outputs)
-
+def warp_to_template(warp_what: Literal["mask", "wholehead"],
+ space_from: Literal["longitudinal", "T1w"]) -> NodeBlockFunction:
+    """Get a NodeBlockFunction to transform a resource from ``space_from`` to template.
-@nodeblock(
- name="transform_T1mask_to_T1template",
- switch=[
- ["registration_workflows", "anatomical_registration", "run"],
- ["anatomical_preproc", "run"],
- ["anatomical_preproc", "brain_extraction", "run"],
- ],
- inputs=[
- ("space-T1w_desc-brain_mask", "from-T1w_to-template_mode-image_xfm"),
- "T1w-template",
- ],
- outputs={"space-template_desc-brain_mask": {"Template": "T1w-template"}},
-)
-def warp_T1mask_to_template(wf, cfg, strat_pool, pipe_num, opt=None):
-
- xfm_prov = strat_pool.get_cpac_provenance(
- 'from-T1w_to-template_mode-image_xfm')
- reg_tool = check_prov_for_regtool(xfm_prov)
-
- num_cpus = cfg.pipeline_setup['system_config'][
- 'max_cores_per_participant']
-
- num_ants_cores = cfg.pipeline_setup['system_config']['num_ants_threads']
-
- apply_xfm = apply_transform(f'warp_T1mask_to_T1template_{pipe_num}',
- reg_tool, time_series=False, num_cpus=num_cpus,
- num_ants_cores=num_ants_cores)
+ The resource being warped needs to be the first list or string in the tuple
+ in the first position of the decorator's "inputs".
+ """
+ _decorators = {"mask": {
+ "name": f"transform_{space_from}-mask_to_T1-template",
+ "switch": [
+ ["registration_workflows", "anatomical_registration", "run"],
+ ["anatomical_preproc", "run"],
+ ["anatomical_preproc", "brain_extraction", "run"],
+ ],
+ "inputs": [
+ (f"space-{space_from}_desc-brain_mask",
+ f"from-{space_from}_to-template_mode-image_xfm"),
+ "T1w-template",
+ ],
+ "outputs": {"space-template_desc-brain_mask": {"Template": "T1w-template"}},
+ }, "wholehead": {
+ "name": f"transform_wholehead_{space_from}_to_T1template",
+ "config": ["registration_workflows", "anatomical_registration"],
+ "switch": ["run"],
+ "inputs": [
+ (
+ ["desc-head_T1w", "desc-reorient_T1w"],
+ [f"from-{space_from}_to-template_mode-image_xfm",
+ f"from-{space_from}_to-template_mode-image_xfm"],
+ "space-template_desc-head_T1w",
+ ),
+ "T1w-template",
+ ],
+ "outputs": {"space-template_desc-head_T1w": {"Template": "T1w-template"}},
+ }}
+ if space_from != "T1w":
+ _decorators[warp_what]["inputs"][0] = tuple((prepend_space(
+ _decorators[warp_what]["inputs"][0][0], space_from),
+ *_decorators[warp_what]["inputs"][0][1:]
+ ))
+
+ @nodeblock(**_decorators[warp_what])
+ def warp_to_template_fxn(wf, cfg, strat_pool, pipe_num, opt=None):
+ """Transform a resource to template space."""
+
+ xfm_prov = strat_pool.get_cpac_provenance(
+ f'from-{space_from}_to-template_mode-image_xfm')
+ reg_tool = check_prov_for_regtool(xfm_prov)
+
+ num_cpus = cfg.pipeline_setup['system_config'][
+ 'max_cores_per_participant']
+
+ num_ants_cores = cfg.pipeline_setup['system_config']['num_ants_threads']
+
+ apply_xfm = apply_transform(
+ f'warp_{space_from}{warp_what}_to_T1template_{pipe_num}',
+ reg_tool, time_series=False, num_cpus=num_cpus,
+ num_ants_cores=num_ants_cores)
+
+ if warp_what == "mask":
+ apply_xfm.inputs.inputspec.interpolation = "NearestNeighbor"
+ else:
+ tool = "ANTs" if reg_tool == 'ants' else 'FNIRT' if reg_tool == 'fsl' else None
+ if not tool:
+ msg = f"Warp {warp_what} to template not implemented for {reg_tool}."
+ raise NotImplementedError(msg)
+ apply_xfm.inputs.inputspec.interpolation = cfg.registration_workflows[
+ 'functional_registration']['func_registration_to_template'][
+ f'{tool}_pipelines']['interpolation']
- apply_xfm.inputs.inputspec.interpolation = "NearestNeighbor"
- '''
- if reg_tool == 'ants':
- apply_xfm.inputs.inputspec.interpolation = cfg.registration_workflows[
- 'functional_registration']['func_registration_to_template'][
- 'ANTs_pipelines']['interpolation']
- elif reg_tool == 'fsl':
- apply_xfm.inputs.inputspec.interpolation = cfg.registration_workflows[
- 'functional_registration']['func_registration_to_template'][
- 'FNIRT_pipelines']['interpolation']
- '''
- connect = strat_pool.get_data("space-T1w_desc-brain_mask")
- node, out = connect
- wf.connect(node, out, apply_xfm, 'inputspec.input_image')
+ # the resource being warped needs to be inputs[0][0] for this
+ node, out = strat_pool.get_data(_decorators[warp_what]["inputs"][0][0])
+ wf.connect(node, out, apply_xfm, 'inputspec.input_image')
- node, out = strat_pool.get_data("T1w-template")
- wf.connect(node, out, apply_xfm, 'inputspec.reference')
+ node, out = strat_pool.get_data("T1w-template")
+ wf.connect(node, out, apply_xfm, 'inputspec.reference')
- node, out = strat_pool.get_data("from-T1w_to-template_mode-image_xfm")
- wf.connect(node, out, apply_xfm, 'inputspec.transform')
+ node, out = strat_pool.get_data(f"from-{space_from}_to-template_mode-image_xfm")
+ wf.connect(node, out, apply_xfm, 'inputspec.transform')
- outputs = {
- 'space-template_desc-brain_mask': (apply_xfm, 'outputspec.output_image')
- }
+ outputs = {
+ # there's only one output, so that's what we give here
+ list(_decorators[warp_what]["outputs"].keys())[0]: (
+ apply_xfm, 'outputspec.output_image')
+ }
- return (wf, outputs)
+ return wf, outputs
+ return warp_to_template_fxn
@nodeblock(
diff --git a/CPAC/registration/utils.py b/CPAC/registration/utils.py
index 1185f0190b..2bf3d62850 100644
--- a/CPAC/registration/utils.py
+++ b/CPAC/registration/utils.py
@@ -1,4 +1,22 @@
+# Copyright (C) 2014-2024 C-PAC Developers
+
+# This file is part of C-PAC.
+
+# C-PAC is free software: you can redistribute it and/or modify it under
+# the terms of the GNU Lesser General Public License as published by the
+# Free Software Foundation, either version 3 of the License, or (at your
+# option) any later version.
+
+# C-PAC is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
+# License for more details.
+
+# You should have received a copy of the GNU Lesser General Public
+# License along with C-PAC. If not, see <https://www.gnu.org/licenses/>.
+# pylint: disable=too-many-lines,ungrouped-imports,wrong-import-order
import os
+from typing import overload
import numpy as np
@@ -638,3 +656,18 @@ def run_c4d(input, output_name):
os.system(cmd)
return output1, output2, output3
+
+
+@overload
+def prepend_space(resource: list[str], space: str) -> list[str]: ...
+@overload
+def prepend_space(resource: str, space: str) -> str: ...
+def prepend_space(resource: str | list[str], space: str) -> str | list[str]:
+ """Given a resource or list of resources, return same but with updated space."""
+ if isinstance(resource, list):
+ return [prepend_space(_, space) for _ in resource]
+ if "space" not in resource:
+ return f"space-{space}_{resource}"
+ pre, post = resource.split("space-")
+ _old_space, post = post.split("_", 1)
+ return f"space-{space}_".join([pre, post])
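
`prepend_space` either adds a `space-<space>_` prefix to a resource name that has no space entity or swaps out the existing one, and maps over lists. A quick check of the expected behavior (outputs worked out by hand from the code above, not taken from the C-PAC test suite):

from CPAC.registration.utils import prepend_space

prepend_space("desc-brain_mask", "longitudinal")
# -> "space-longitudinal_desc-brain_mask"
prepend_space("space-T1w_desc-brain_mask", "longitudinal")
# -> "space-longitudinal_desc-brain_mask"  (existing space- entity replaced)
prepend_space(["desc-head_T1w", "desc-reorient_T1w"], "longitudinal")
# -> ["space-longitudinal_desc-head_T1w", "space-longitudinal_desc-reorient_T1w"]
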
diff --git a/CPAC/resources/configs/data_config_S3-BIDS-ABIDE.yml b/CPAC/resources/configs/data_config_S3-BIDS-ABIDE.yml
index d674131633..d3c4f32731 100644
--- a/CPAC/resources/configs/data_config_S3-BIDS-ABIDE.yml
+++ b/CPAC/resources/configs/data_config_S3-BIDS-ABIDE.yml
@@ -1,5 +1,5 @@
# CPAC Data Configuration File
-# Version 1.8.7.dev1
+# Version 1.8.7.post1.dev3
#
# http://fcp-indi.github.io for more info.
#
diff --git a/CPAC/resources/configs/data_config_S3-BIDS-ADHD200.yml b/CPAC/resources/configs/data_config_S3-BIDS-ADHD200.yml
index cae981cd9a..133a7ca488 100644
--- a/CPAC/resources/configs/data_config_S3-BIDS-ADHD200.yml
+++ b/CPAC/resources/configs/data_config_S3-BIDS-ADHD200.yml
@@ -1,5 +1,5 @@
# CPAC Data Configuration File
-# Version 1.8.7.dev1
+# Version 1.8.7.post1.dev3
#
# http://fcp-indi.github.io for more info.
#
diff --git a/CPAC/resources/configs/data_config_S3-BIDS-ADHD200_only2.yml b/CPAC/resources/configs/data_config_S3-BIDS-ADHD200_only2.yml
index 0ef6d3ccf7..9d64140664 100644
--- a/CPAC/resources/configs/data_config_S3-BIDS-ADHD200_only2.yml
+++ b/CPAC/resources/configs/data_config_S3-BIDS-ADHD200_only2.yml
@@ -1,5 +1,5 @@
# CPAC Data Configuration File
-# Version 1.8.7.dev1
+# Version 1.8.7.post1.dev3
#
# http://fcp-indi.github.io for more info.
#
diff --git a/CPAC/resources/configs/data_config_S3-BIDS-NKI-RocklandSample.yml b/CPAC/resources/configs/data_config_S3-BIDS-NKI-RocklandSample.yml
index 77a62527e6..3a0aec44ba 100644
--- a/CPAC/resources/configs/data_config_S3-BIDS-NKI-RocklandSample.yml
+++ b/CPAC/resources/configs/data_config_S3-BIDS-NKI-RocklandSample.yml
@@ -1,5 +1,5 @@
# CPAC Data Configuration File
-# Version 1.8.7.dev1
+# Version 1.8.7.post1.dev3
#
# http://fcp-indi.github.io for more info.
#
diff --git a/CPAC/resources/configs/data_config_cpac_benchmark.yml b/CPAC/resources/configs/data_config_cpac_benchmark.yml
index 15460d264c..f487a259db 100644
--- a/CPAC/resources/configs/data_config_cpac_benchmark.yml
+++ b/CPAC/resources/configs/data_config_cpac_benchmark.yml
@@ -1,5 +1,5 @@
# CPAC Data Configuration File
-# Version 1.8.7.dev1
+# Version 1.8.7.post1.dev3
#
# http://fcp-indi.github.io for more info.
#
diff --git a/CPAC/resources/configs/data_settings_template.yml b/CPAC/resources/configs/data_settings_template.yml
index 1d6c1b912e..3378ac6c7c 100644
--- a/CPAC/resources/configs/data_settings_template.yml
+++ b/CPAC/resources/configs/data_settings_template.yml
@@ -1,5 +1,5 @@
# CPAC Data Settings File
-# Version 1.8.7.dev1
+# Version 1.8.7.post1.dev3
#
# http://fcp-indi.github.io for more info.
#
diff --git a/CPAC/resources/configs/group_config_template.yml b/CPAC/resources/configs/group_config_template.yml
index 996c2386f0..da828118df 100644
--- a/CPAC/resources/configs/group_config_template.yml
+++ b/CPAC/resources/configs/group_config_template.yml
@@ -1,5 +1,5 @@
# CPAC Group-Level Analysis Configuration YAML file
-# Version 1.8.7.dev1
+# Version 1.8.7.post1.dev3
#
# http://fcp-indi.github.io for more info.
#
diff --git a/CPAC/resources/configs/pipeline_config_abcd-options.yml b/CPAC/resources/configs/pipeline_config_abcd-options.yml
index 10ea6bece3..187e7be282 100644
--- a/CPAC/resources/configs/pipeline_config_abcd-options.yml
+++ b/CPAC/resources/configs/pipeline_config_abcd-options.yml
@@ -1,7 +1,7 @@
%YAML 1.1
---
# CPAC Pipeline Configuration YAML file
-# Version 1.8.7.dev1
+# Version 1.8.7.post1.dev3
#
# http://fcp-indi.github.io for more info.
#
diff --git a/CPAC/resources/configs/pipeline_config_abcd-prep.yml b/CPAC/resources/configs/pipeline_config_abcd-prep.yml
index 7aee4e80ad..18f9fab116 100644
--- a/CPAC/resources/configs/pipeline_config_abcd-prep.yml
+++ b/CPAC/resources/configs/pipeline_config_abcd-prep.yml
@@ -1,7 +1,7 @@
%YAML 1.1
---
# CPAC Pipeline Configuration YAML file
-# Version 1.8.7.dev1
+# Version 1.8.7.post1.dev3
#
# http://fcp-indi.github.io for more info.
#
diff --git a/CPAC/resources/configs/pipeline_config_anat-only.yml b/CPAC/resources/configs/pipeline_config_anat-only.yml
index 01e902abc0..37a80923d9 100644
--- a/CPAC/resources/configs/pipeline_config_anat-only.yml
+++ b/CPAC/resources/configs/pipeline_config_anat-only.yml
@@ -1,7 +1,7 @@
%YAML 1.1
---
# CPAC Pipeline Configuration YAML file
-# Version 1.8.7.dev1
+# Version 1.8.7.post1.dev3
#
# http://fcp-indi.github.io for more info.
#
diff --git a/CPAC/resources/configs/pipeline_config_benchmark-ANTS.yml b/CPAC/resources/configs/pipeline_config_benchmark-ANTS.yml
index f59be35958..aa9f757f3c 100644
--- a/CPAC/resources/configs/pipeline_config_benchmark-ANTS.yml
+++ b/CPAC/resources/configs/pipeline_config_benchmark-ANTS.yml
@@ -1,7 +1,7 @@
%YAML 1.1
---
# CPAC Pipeline Configuration YAML file
-# Version 1.8.7.dev1
+# Version 1.8.7.post1.dev3
#
# http://fcp-indi.github.io for more info.
#
diff --git a/CPAC/resources/configs/pipeline_config_benchmark-FNIRT.yml b/CPAC/resources/configs/pipeline_config_benchmark-FNIRT.yml
index 8fb774aa80..797f2909e1 100644
--- a/CPAC/resources/configs/pipeline_config_benchmark-FNIRT.yml
+++ b/CPAC/resources/configs/pipeline_config_benchmark-FNIRT.yml
@@ -1,7 +1,7 @@
%YAML 1.1
---
# CPAC Pipeline Configuration YAML file
-# Version 1.8.7.dev1
+# Version 1.8.7.post1.dev3
#
# http://fcp-indi.github.io for more info.
#
diff --git a/CPAC/resources/configs/pipeline_config_blank.yml b/CPAC/resources/configs/pipeline_config_blank.yml
index f4c049b1b5..c8721ab893 100644
--- a/CPAC/resources/configs/pipeline_config_blank.yml
+++ b/CPAC/resources/configs/pipeline_config_blank.yml
@@ -1,7 +1,7 @@
%YAML 1.1
---
# CPAC Pipeline Configuration YAML file
-# Version 1.8.7.dev1
+# Version 1.8.7.post1.dev3
#
# http://fcp-indi.github.io for more info.
#
diff --git a/CPAC/resources/configs/pipeline_config_ccs-options.yml b/CPAC/resources/configs/pipeline_config_ccs-options.yml
index 61207908f0..037a5cdd41 100644
--- a/CPAC/resources/configs/pipeline_config_ccs-options.yml
+++ b/CPAC/resources/configs/pipeline_config_ccs-options.yml
@@ -1,7 +1,7 @@
%YAML 1.1
---
# CPAC Pipeline Configuration YAML file
-# Version 1.8.7.dev1
+# Version 1.8.7.post1.dev3
#
# http://fcp-indi.github.io for more info.
#
diff --git a/CPAC/resources/configs/pipeline_config_default-deprecated.yml b/CPAC/resources/configs/pipeline_config_default-deprecated.yml
index 9a11649dee..3f1d4cb814 100644
--- a/CPAC/resources/configs/pipeline_config_default-deprecated.yml
+++ b/CPAC/resources/configs/pipeline_config_default-deprecated.yml
@@ -1,7 +1,7 @@
%YAML 1.1
---
# CPAC Pipeline Configuration YAML file
-# Version 1.8.7.dev1
+# Version 1.8.7.post1.dev3
#
# http://fcp-indi.github.io for more info.
#
diff --git a/CPAC/resources/configs/pipeline_config_default.yml b/CPAC/resources/configs/pipeline_config_default.yml
index 71609ce58d..202d452807 100644
--- a/CPAC/resources/configs/pipeline_config_default.yml
+++ b/CPAC/resources/configs/pipeline_config_default.yml
@@ -1,7 +1,7 @@
%YAML 1.1
---
# CPAC Pipeline Configuration YAML file
-# Version 1.8.7.dev1
+# Version 1.8.7.post1.dev3
#
# http://fcp-indi.github.io for more info.
#
diff --git a/CPAC/resources/configs/pipeline_config_fmriprep-ingress.yml b/CPAC/resources/configs/pipeline_config_fmriprep-ingress.yml
index cb81ba5958..bda6bfdf9e 100644
--- a/CPAC/resources/configs/pipeline_config_fmriprep-ingress.yml
+++ b/CPAC/resources/configs/pipeline_config_fmriprep-ingress.yml
@@ -1,7 +1,7 @@
%YAML 1.1
---
# CPAC Pipeline Configuration YAML file
-# Version 1.8.7.dev1
+# Version 1.8.7.post1.dev3
#
# http://fcp-indi.github.io for more info.
#
diff --git a/CPAC/resources/configs/pipeline_config_fmriprep-options.yml b/CPAC/resources/configs/pipeline_config_fmriprep-options.yml
index b432770220..c52402fe7d 100644
--- a/CPAC/resources/configs/pipeline_config_fmriprep-options.yml
+++ b/CPAC/resources/configs/pipeline_config_fmriprep-options.yml
@@ -1,7 +1,7 @@
%YAML 1.1
---
# CPAC Pipeline Configuration YAML file
-# Version 1.8.7.dev1
+# Version 1.8.7.post1.dev3
#
# http://fcp-indi.github.io for more info.
#
diff --git a/CPAC/resources/configs/pipeline_config_fx-options.yml b/CPAC/resources/configs/pipeline_config_fx-options.yml
index ade6c66940..b88215e423 100644
--- a/CPAC/resources/configs/pipeline_config_fx-options.yml
+++ b/CPAC/resources/configs/pipeline_config_fx-options.yml
@@ -1,7 +1,7 @@
%YAML 1.1
---
# CPAC Pipeline Configuration YAML file
-# Version 1.8.7.dev1
+# Version 1.8.7.post1.dev3
#
# http://fcp-indi.github.io for more info.
#
diff --git a/CPAC/resources/configs/pipeline_config_monkey-ABCD.yml b/CPAC/resources/configs/pipeline_config_monkey-ABCD.yml
index b598e38df3..90a9d34209 100644
--- a/CPAC/resources/configs/pipeline_config_monkey-ABCD.yml
+++ b/CPAC/resources/configs/pipeline_config_monkey-ABCD.yml
@@ -1,7 +1,7 @@
%YAML 1.1
---
# CPAC Pipeline Configuration YAML file
-# Version 1.8.7.dev1
+# Version 1.8.7.post1.dev3
#
# http://fcp-indi.github.io for more info.
#
diff --git a/CPAC/resources/configs/pipeline_config_monkey.yml b/CPAC/resources/configs/pipeline_config_monkey.yml
index 09bf8d48ef..1bb83e3a4a 100644
--- a/CPAC/resources/configs/pipeline_config_monkey.yml
+++ b/CPAC/resources/configs/pipeline_config_monkey.yml
@@ -1,7 +1,7 @@
%YAML 1.1
---
# CPAC Pipeline Configuration YAML file
-# Version 1.8.7.dev1
+# Version 1.8.7.post1.dev3
#
# http://fcp-indi.github.io for more info.
#
diff --git a/CPAC/resources/configs/pipeline_config_ndmg.yml b/CPAC/resources/configs/pipeline_config_ndmg.yml
index 70d27d8f7b..1dedcc640c 100644
--- a/CPAC/resources/configs/pipeline_config_ndmg.yml
+++ b/CPAC/resources/configs/pipeline_config_ndmg.yml
@@ -1,7 +1,7 @@
%YAML 1.1
---
# CPAC Pipeline Configuration YAML file
-# Version 1.8.7.dev1
+# Version 1.8.7.post1.dev3
#
# http://fcp-indi.github.io for more info.
#
diff --git a/CPAC/resources/configs/pipeline_config_nhp-macaque.yml b/CPAC/resources/configs/pipeline_config_nhp-macaque.yml
index bd4bb18b92..24df5e033b 100644
--- a/CPAC/resources/configs/pipeline_config_nhp-macaque.yml
+++ b/CPAC/resources/configs/pipeline_config_nhp-macaque.yml
@@ -1,7 +1,7 @@
%YAML 1.1
---
# CPAC Pipeline Configuration YAML file
-# Version 1.8.7.dev1
+# Version 1.8.7.post1.dev3
#
# http://fcp-indi.github.io for more info.
#
diff --git a/CPAC/resources/configs/pipeline_config_preproc.yml b/CPAC/resources/configs/pipeline_config_preproc.yml
index 8326a41696..ee146d07c2 100644
--- a/CPAC/resources/configs/pipeline_config_preproc.yml
+++ b/CPAC/resources/configs/pipeline_config_preproc.yml
@@ -1,7 +1,7 @@
%YAML 1.1
---
# CPAC Pipeline Configuration YAML file
-# Version 1.8.7.dev1
+# Version 1.8.7.post1.dev3
#
# http://fcp-indi.github.io for more info.
#
diff --git a/CPAC/resources/configs/pipeline_config_rbc-options.yml b/CPAC/resources/configs/pipeline_config_rbc-options.yml
index eb27e51c8e..2e10e6d2e0 100644
--- a/CPAC/resources/configs/pipeline_config_rbc-options.yml
+++ b/CPAC/resources/configs/pipeline_config_rbc-options.yml
@@ -1,7 +1,7 @@
%YAML 1.1
---
# CPAC Pipeline Configuration YAML file
-# Version 1.8.7.dev1
+# Version 1.8.7.post1.dev3
#
# http://fcp-indi.github.io for more info.
#
diff --git a/CPAC/resources/configs/pipeline_config_regtest-1.yml b/CPAC/resources/configs/pipeline_config_regtest-1.yml
index fccbb36fc1..bc3e4d3897 100644
--- a/CPAC/resources/configs/pipeline_config_regtest-1.yml
+++ b/CPAC/resources/configs/pipeline_config_regtest-1.yml
@@ -1,7 +1,7 @@
%YAML 1.1
---
# CPAC Pipeline Configuration YAML file
-# Version 1.8.7.dev1
+# Version 1.8.7.post1.dev3
#
# http://fcp-indi.github.io for more info.
#
diff --git a/CPAC/resources/configs/pipeline_config_regtest-2.yml b/CPAC/resources/configs/pipeline_config_regtest-2.yml
index 08d546bdc2..683ac46bc1 100644
--- a/CPAC/resources/configs/pipeline_config_regtest-2.yml
+++ b/CPAC/resources/configs/pipeline_config_regtest-2.yml
@@ -1,7 +1,7 @@
%YAML 1.1
---
# CPAC Pipeline Configuration YAML file
-# Version 1.8.7.dev1
+# Version 1.8.7.post1.dev3
#
# http://fcp-indi.github.io for more info.
#
diff --git a/CPAC/resources/configs/pipeline_config_regtest-3.yml b/CPAC/resources/configs/pipeline_config_regtest-3.yml
index 77febb731f..409ac8fbfb 100644
--- a/CPAC/resources/configs/pipeline_config_regtest-3.yml
+++ b/CPAC/resources/configs/pipeline_config_regtest-3.yml
@@ -1,7 +1,7 @@
%YAML 1.1
---
# CPAC Pipeline Configuration YAML file
-# Version 1.8.7.dev1
+# Version 1.8.7.post1.dev3
#
# http://fcp-indi.github.io for more info.
#
diff --git a/CPAC/resources/configs/pipeline_config_regtest-4.yml b/CPAC/resources/configs/pipeline_config_regtest-4.yml
index 308d64cefa..995f801adc 100644
--- a/CPAC/resources/configs/pipeline_config_regtest-4.yml
+++ b/CPAC/resources/configs/pipeline_config_regtest-4.yml
@@ -1,7 +1,7 @@
%YAML 1.1
---
# CPAC Pipeline Configuration YAML file
-# Version 1.8.7.dev1
+# Version 1.8.7.post1.dev3
#
# http://fcp-indi.github.io for more info.
#
diff --git a/CPAC/resources/configs/pipeline_config_rodent.yml b/CPAC/resources/configs/pipeline_config_rodent.yml
index f829296a26..c11b6fded2 100644
--- a/CPAC/resources/configs/pipeline_config_rodent.yml
+++ b/CPAC/resources/configs/pipeline_config_rodent.yml
@@ -1,7 +1,7 @@
%YAML 1.1
---
# CPAC Pipeline Configuration YAML file
-# Version 1.8.7.dev1
+# Version 1.8.7.post1.dev3
#
# http://fcp-indi.github.io for more info.
#
diff --git a/CPAC/resources/configs/system_config.yml b/CPAC/resources/configs/system_config.yml
index 291adc19a9..af764667e3 100644
--- a/CPAC/resources/configs/system_config.yml
+++ b/CPAC/resources/configs/system_config.yml
@@ -1,5 +1,5 @@
# C-PAC System Configuration YAML file
-# Version 1.8.7.dev1
+# Version 1.8.7.post1.dev3
#
# http://fcp-indi.github.io for more info.
#
diff --git a/CPAC/resources/configs/test_configs/data-test_S3-ADHD200_1.yml b/CPAC/resources/configs/test_configs/data-test_S3-ADHD200_1.yml
index 97679f1f76..c5d08ffe5b 100644
--- a/CPAC/resources/configs/test_configs/data-test_S3-ADHD200_1.yml
+++ b/CPAC/resources/configs/test_configs/data-test_S3-ADHD200_1.yml
@@ -1,5 +1,5 @@
# CPAC Data Configuration File
-# Version 1.8.7.dev1
+# Version 1.8.7.post1.dev3
#
# http://fcp-indi.github.io for more info.
#
diff --git a/CPAC/resources/configs/test_configs/data-test_S3-ADHD200_no-params.yml b/CPAC/resources/configs/test_configs/data-test_S3-ADHD200_no-params.yml
index 3d1a4f46ca..45f1a27a5c 100644
--- a/CPAC/resources/configs/test_configs/data-test_S3-ADHD200_no-params.yml
+++ b/CPAC/resources/configs/test_configs/data-test_S3-ADHD200_no-params.yml
@@ -1,5 +1,5 @@
# CPAC Data Configuration File
-# Version 1.8.7.dev1
+# Version 1.8.7.post1.dev3
#
# http://fcp-indi.github.io for more info.
#
diff --git a/CPAC/resources/configs/test_configs/data-test_S3-NKI-RS_fmap.yml b/CPAC/resources/configs/test_configs/data-test_S3-NKI-RS_fmap.yml
index 080595f840..6a778b10f7 100644
--- a/CPAC/resources/configs/test_configs/data-test_S3-NKI-RS_fmap.yml
+++ b/CPAC/resources/configs/test_configs/data-test_S3-NKI-RS_fmap.yml
@@ -1,5 +1,5 @@
# CPAC Data Configuration File
-# Version 1.8.7.dev1
+# Version 1.8.7.post1.dev3
#
# http://fcp-indi.github.io for more info.
#
diff --git a/CPAC/resources/configs/test_configs/data_config_S3_CoRR_5only_mult-scan.yml b/CPAC/resources/configs/test_configs/data_config_S3_CoRR_5only_mult-scan.yml
index 0d428b0340..c58457088f 100644
--- a/CPAC/resources/configs/test_configs/data_config_S3_CoRR_5only_mult-scan.yml
+++ b/CPAC/resources/configs/test_configs/data_config_S3_CoRR_5only_mult-scan.yml
@@ -1,5 +1,5 @@
# CPAC Data Configuration File
-# Version 1.8.7.dev1
+# Version 1.8.7.post1.dev3
#
# http://fcp-indi.github.io for more info.
#
diff --git a/CPAC/resources/configs/test_configs/data_config_S3_CoRR_5only_mult-sess.yml b/CPAC/resources/configs/test_configs/data_config_S3_CoRR_5only_mult-sess.yml
index 1d55f02e82..25f78befaf 100644
--- a/CPAC/resources/configs/test_configs/data_config_S3_CoRR_5only_mult-sess.yml
+++ b/CPAC/resources/configs/test_configs/data_config_S3_CoRR_5only_mult-sess.yml
@@ -1,5 +1,5 @@
# CPAC Data Configuration File
-# Version 1.8.7.dev1
+# Version 1.8.7.post1.dev3
#
# http://fcp-indi.github.io for more info.
#
diff --git a/CPAC/resources/configs/test_configs/pipe-test_ABCD.yml b/CPAC/resources/configs/test_configs/pipe-test_ABCD.yml
index 39023ec11d..9890639a23 100644
--- a/CPAC/resources/configs/test_configs/pipe-test_ABCD.yml
+++ b/CPAC/resources/configs/test_configs/pipe-test_ABCD.yml
@@ -1,7 +1,7 @@
%YAML 1.1
---
# CPAC Pipeline Configuration YAML file
-# Version 1.8.7.dev1
+# Version 1.8.7.post1.dev3
#
# http://fcp-indi.github.io for more info.
#
diff --git a/CPAC/resources/configs/test_configs/pipe-test_ANTs-3dSk-AllNuis.yml b/CPAC/resources/configs/test_configs/pipe-test_ANTs-3dSk-AllNuis.yml
index e68f31d827..ba467b15a5 100644
--- a/CPAC/resources/configs/test_configs/pipe-test_ANTs-3dSk-AllNuis.yml
+++ b/CPAC/resources/configs/test_configs/pipe-test_ANTs-3dSk-AllNuis.yml
@@ -1,7 +1,7 @@
%YAML 1.1
---
# CPAC Pipeline Configuration YAML file
-# Version 1.8.7.dev1
+# Version 1.8.7.post1.dev3
#
# http://fcp-indi.github.io for more info.
#
diff --git a/CPAC/resources/configs/test_configs/pipe-test_ANTs-3dSk-DistCorr3dSk.yml b/CPAC/resources/configs/test_configs/pipe-test_ANTs-3dSk-DistCorr3dSk.yml
index 510e3a7ef9..c9f9425a25 100644
--- a/CPAC/resources/configs/test_configs/pipe-test_ANTs-3dSk-DistCorr3dSk.yml
+++ b/CPAC/resources/configs/test_configs/pipe-test_ANTs-3dSk-DistCorr3dSk.yml
@@ -1,7 +1,7 @@
%YAML 1.1
---
# CPAC Pipeline Configuration YAML file
-# Version 1.8.7.dev1
+# Version 1.8.7.post1.dev3
#
# http://fcp-indi.github.io for more info.
#
diff --git a/CPAC/resources/configs/test_configs/pipe-test_ANTs-3dSk-DistCorrBET.yml b/CPAC/resources/configs/test_configs/pipe-test_ANTs-3dSk-DistCorrBET.yml
index e6940dc729..616a978fc7 100644
--- a/CPAC/resources/configs/test_configs/pipe-test_ANTs-3dSk-DistCorrBET.yml
+++ b/CPAC/resources/configs/test_configs/pipe-test_ANTs-3dSk-DistCorrBET.yml
@@ -1,7 +1,7 @@
%YAML 1.1
---
# CPAC Pipeline Configuration YAML file
-# Version 1.8.7.dev1
+# Version 1.8.7.post1.dev3
#
# http://fcp-indi.github.io for more info.
#
diff --git a/CPAC/resources/configs/test_configs/pipe-test_ANTs-BET-AllNuis.yml b/CPAC/resources/configs/test_configs/pipe-test_ANTs-BET-AllNuis.yml
index 0e9e8f34c5..b68daccf1a 100644
--- a/CPAC/resources/configs/test_configs/pipe-test_ANTs-BET-AllNuis.yml
+++ b/CPAC/resources/configs/test_configs/pipe-test_ANTs-BET-AllNuis.yml
@@ -1,7 +1,7 @@
%YAML 1.1
---
# CPAC Pipeline Configuration YAML file
-# Version 1.8.7.dev1
+# Version 1.8.7.post1.dev3
#
# http://fcp-indi.github.io for more info.
#
diff --git a/CPAC/resources/configs/test_configs/pipe-test_FNIRT-3dSk-AllNuis.yml b/CPAC/resources/configs/test_configs/pipe-test_FNIRT-3dSk-AllNuis.yml
index 3326d3427f..416296fdde 100644
--- a/CPAC/resources/configs/test_configs/pipe-test_FNIRT-3dSk-AllNuis.yml
+++ b/CPAC/resources/configs/test_configs/pipe-test_FNIRT-3dSk-AllNuis.yml
@@ -1,7 +1,7 @@
%YAML 1.1
---
# CPAC Pipeline Configuration YAML file
-# Version 1.8.7.dev1
+# Version 1.8.7.post1.dev3
#
# http://fcp-indi.github.io for more info.
#
diff --git a/CPAC/resources/configs/test_configs/pipe-test_FNIRT-BET-AllNuis-BASC.yml b/CPAC/resources/configs/test_configs/pipe-test_FNIRT-BET-AllNuis-BASC.yml
index 8f4faae6c1..2b6a15ebb5 100644
--- a/CPAC/resources/configs/test_configs/pipe-test_FNIRT-BET-AllNuis-BASC.yml
+++ b/CPAC/resources/configs/test_configs/pipe-test_FNIRT-BET-AllNuis-BASC.yml
@@ -1,7 +1,7 @@
%YAML 1.1
---
# CPAC Pipeline Configuration YAML file
-# Version 1.8.7.dev1
+# Version 1.8.7.post1.dev3
#
# http://fcp-indi.github.io for more info.
#
diff --git a/CPAC/resources/configs/test_configs/pipe-test_FNIRT-BET-AllNuis-ISC-voxel.yml b/CPAC/resources/configs/test_configs/pipe-test_FNIRT-BET-AllNuis-ISC-voxel.yml
index 8f4faae6c1..2b6a15ebb5 100644
--- a/CPAC/resources/configs/test_configs/pipe-test_FNIRT-BET-AllNuis-ISC-voxel.yml
+++ b/CPAC/resources/configs/test_configs/pipe-test_FNIRT-BET-AllNuis-ISC-voxel.yml
@@ -1,7 +1,7 @@
%YAML 1.1
---
# CPAC Pipeline Configuration YAML file
-# Version 1.8.7.dev1
+# Version 1.8.7.post1.dev3
#
# http://fcp-indi.github.io for more info.
#
diff --git a/CPAC/resources/configs/test_configs/pipe-test_FNIRT-BET-AllNuis-ISC.yml b/CPAC/resources/configs/test_configs/pipe-test_FNIRT-BET-AllNuis-ISC.yml
index 8f4faae6c1..2b6a15ebb5 100644
--- a/CPAC/resources/configs/test_configs/pipe-test_FNIRT-BET-AllNuis-ISC.yml
+++ b/CPAC/resources/configs/test_configs/pipe-test_FNIRT-BET-AllNuis-ISC.yml
@@ -1,7 +1,7 @@
%YAML 1.1
---
# CPAC Pipeline Configuration YAML file
-# Version 1.8.7.dev1
+# Version 1.8.7.post1.dev3
#
# http://fcp-indi.github.io for more info.
#
diff --git a/CPAC/resources/configs/test_configs/pipe-test_FNIRT-BET-AllNuis-MDMR.yml b/CPAC/resources/configs/test_configs/pipe-test_FNIRT-BET-AllNuis-MDMR.yml
index 8f4faae6c1..2b6a15ebb5 100644
--- a/CPAC/resources/configs/test_configs/pipe-test_FNIRT-BET-AllNuis-MDMR.yml
+++ b/CPAC/resources/configs/test_configs/pipe-test_FNIRT-BET-AllNuis-MDMR.yml
@@ -1,7 +1,7 @@
%YAML 1.1
---
# CPAC Pipeline Configuration YAML file
-# Version 1.8.7.dev1
+# Version 1.8.7.post1.dev3
#
# http://fcp-indi.github.io for more info.
#
diff --git a/CPAC/resources/configs/test_configs/pipe-test_FNIRT-BET-AllNuis.yml b/CPAC/resources/configs/test_configs/pipe-test_FNIRT-BET-AllNuis.yml
index 8f4faae6c1..2b6a15ebb5 100644
--- a/CPAC/resources/configs/test_configs/pipe-test_FNIRT-BET-AllNuis.yml
+++ b/CPAC/resources/configs/test_configs/pipe-test_FNIRT-BET-AllNuis.yml
@@ -1,7 +1,7 @@
%YAML 1.1
---
# CPAC Pipeline Configuration YAML file
-# Version 1.8.7.dev1
+# Version 1.8.7.post1.dev3
#
# http://fcp-indi.github.io for more info.
#
diff --git a/CPAC/resources/configs/test_configs/pipe-test_all.yml b/CPAC/resources/configs/test_configs/pipe-test_all.yml
index b6feb9c42c..5f03ecf00f 100644
--- a/CPAC/resources/configs/test_configs/pipe-test_all.yml
+++ b/CPAC/resources/configs/test_configs/pipe-test_all.yml
@@ -1,7 +1,7 @@
%YAML 1.1
---
# CPAC Pipeline Configuration YAML file
-# Version 1.8.7.dev1
+# Version 1.8.7.post1.dev3
#
# http://fcp-indi.github.io for more info.
#
diff --git a/CPAC/seg_preproc/seg_preproc.py b/CPAC/seg_preproc/seg_preproc.py
index 78d40703d9..c060dfdbd9 100644
--- a/CPAC/seg_preproc/seg_preproc.py
+++ b/CPAC/seg_preproc/seg_preproc.py
@@ -434,29 +434,10 @@ def create_seg_preproc_antsJointLabel_method(
"GM-path",
"WM-path",
],
- outputs=[
- "label-CSF_mask",
- "label-GM_mask",
- "label-WM_mask",
- "label-CSF_desc-preproc_mask",
- "label-GM_desc-preproc_mask",
- "label-WM_desc-preproc_mask",
- "label-CSF_probseg",
- "label-GM_probseg",
- "label-WM_probseg",
- "label-CSF_pveseg",
- "label-GM_pveseg",
- "label-WM_pveseg",
- "space-longitudinal_label-CSF_mask",
- "space-longitudinal_label-GM_mask",
- "space-longitudinal_label-WM_mask",
- "space-longitudinal_label-CSF_desc-preproc_mask",
- "space-longitudinal_label-GM_desc-preproc_mask",
- "space-longitudinal_label-WM_desc-preproc_mask",
- "space-longitudinal_label-CSF_probseg",
- "space-longitudinal_label-GM_probseg",
- "space-longitudinal_label-WM_probseg",
- ],
+ outputs=[f"{long}label-{tissue}_{entity}" for
+ long in ["", "space-longitudinal_"] for
+ tissue in ["CSF", "GM", "WM"] for
+ entity in ["mask", "desc-preproc_mask", "probseg", "pveseg"]],
)
def tissue_seg_fsl_fast(wf, cfg, strat_pool, pipe_num, opt=None):
# FSL-FAST
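
For reference, the comprehension introduced above generates 24 output names (2 spaces x 3 tissues x 4 entities): every entry from the removed literal list plus the three "space-longitudinal_label-*_pveseg" names that the old list did not include. A quick standalone check (plain Python, not part of the patch):

    new_outputs = [
        f"{long}label-{tissue}_{entity}"
        for long in ["", "space-longitudinal_"]
        for tissue in ["CSF", "GM", "WM"]
        for entity in ["mask", "desc-preproc_mask", "probseg", "pveseg"]
    ]
    assert len(new_outputs) == 24                        # 2 spaces x 3 tissues x 4 entities
    assert "label-CSF_mask" in new_outputs               # entries from the old list are still produced
    assert "space-longitudinal_label-WM_probseg" in new_outputs
    assert "space-longitudinal_label-GM_pveseg" in new_outputs   # not in the removed literal list
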
diff --git a/CPAC/utils/configuration/configuration.py b/CPAC/utils/configuration/configuration.py
index b66ffa39a9..7712fa3b63 100644
--- a/CPAC/utils/configuration/configuration.py
+++ b/CPAC/utils/configuration/configuration.py
@@ -18,7 +18,7 @@
License along with C-PAC. If not, see <https://www.gnu.org/licenses/>."""
import os
import re
-from typing import Optional
+from typing import Any, Optional
from warnings import warn
import pkg_resources as p
from click import BadParameter
@@ -244,10 +244,14 @@ def __sub__(self: 'Configuration', other: 'Configuration'):
'''
return(dct_diff(self.dict(), other.dict()))
- def dict(self):
+ def dict(self) -> dict[Any, Any]:
'''Show contents of a C-PAC configuration as a dict'''
return {k: v for k, v in self.__dict__.items() if not callable(v)}
+ def get(self, key: Any, default: Any = None, /) -> Any:
+ '''Convenience access to self.dict().get'''
+ return self.dict().get(key, default)
+
def keys(self):
'''Show toplevel keys of a C-PAC configuration dict'''
return self.dict().keys()
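
The new Configuration.get simply mirrors dict.get on the object's dictionary view; the trailing "/" in the signature makes key and default positional-only, and the dict[Any, Any] return annotation on dict() relies on built-in generics (Python 3.9+ or deferred annotations). A usage sketch, assuming cfg is an existing Configuration instance and "pipeline_setup" is one of its top-level keys:

    # Sketch only; cfg stands in for a CPAC.utils.configuration.Configuration instance.
    value = cfg.get("pipeline_setup")            # same as cfg.dict().get("pipeline_setup")
    fallback = cfg.get("not_a_real_key", {})     # default returned instead of None or KeyError
    # key and default are positional-only, so keyword use raises TypeError:
    # cfg.get(key="pipeline_setup")  ->  TypeError
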
diff --git a/Dockerfile b/Dockerfile
index f9ced12d7b..6de7379c1d 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -14,7 +14,7 @@
# You should have received a copy of the GNU Lesser General Public
# License along with C-PAC. If not, see <https://www.gnu.org/licenses/>.
-FROM ghcr.io/fcp-indi/c-pac/stage-base:standard-v1.8.7.dev1
+FROM ghcr.io/fcp-indi/c-pac/stage-base:standard-v1.8.7.post1.dev3
LABEL org.opencontainers.image.description "Full C-PAC image"
LABEL org.opencontainers.image.source https://github.com/FCP-INDI/C-PAC
USER root
diff --git a/variant-ABCD-HCP.Dockerfile b/variant-ABCD-HCP.Dockerfile
index 2a5760ebb7..27631dacd5 100644
--- a/variant-ABCD-HCP.Dockerfile
+++ b/variant-ABCD-HCP.Dockerfile
@@ -1,4 +1,4 @@
-FROM ghcr.io/fcp-indi/c-pac/stage-base:abcd-hcp-v1.8.7.dev1
+FROM ghcr.io/fcp-indi/c-pac/stage-base:abcd-hcp-v1.8.7.post1.dev3
LABEL org.opencontainers.image.description "Full C-PAC image with software dependencies version-matched to [ABCD-HCP BIDS fMRI Pipeline](https://github.com/DCAN-Labs/abcd-hcp-pipeline/blob/e480a8f99534f1b05f37bf44c64827384b69b383/Dockerfile)"
LABEL org.opencontainers.image.source https://github.com/FCP-INDI/C-PAC
USER root
diff --git a/variant-fMRIPrep-LTS.Dockerfile b/variant-fMRIPrep-LTS.Dockerfile
index 0e9cd3d899..3071f2de78 100644
--- a/variant-fMRIPrep-LTS.Dockerfile
+++ b/variant-fMRIPrep-LTS.Dockerfile
@@ -1,4 +1,4 @@
-FROM ghcr.io/fcp-indi/c-pac/stage-base:fmriprep-lts-v1.8.7.dev1
+FROM ghcr.io/fcp-indi/c-pac/stage-base:fmriprep-lts-v1.8.7.post1.dev3
LABEL org.opencontainers.image.description "Full C-PAC image with software dependencies version-matched to [fMRIPrep LTS](https://reproducibility.stanford.edu/fmriprep-lts#long-term-support-lts)"
LABEL org.opencontainers.image.source https://github.com/FCP-INDI/C-PAC
USER root
diff --git a/variant-lite.Dockerfile b/variant-lite.Dockerfile
index 1f6f3a9ec9..98d4ed9f8f 100644
--- a/variant-lite.Dockerfile
+++ b/variant-lite.Dockerfile
@@ -14,7 +14,7 @@
# You should have received a copy of the GNU Lesser General Public
# License along with C-PAC. If not, see <https://www.gnu.org/licenses/>.
-FROM ghcr.io/fcp-indi/c-pac/stage-base:lite-v1.8.7.dev1
+FROM ghcr.io/fcp-indi/c-pac/stage-base:lite-v1.8.7.post1.dev3
LABEL org.opencontainers.image.description "Full C-PAC image without FreeSurfer"
LABEL org.opencontainers.image.source https://github.com/FCP-INDI/C-PAC
USER root
diff --git a/version b/version
index f4c717aa49..3b40862c87 100644
--- a/version
+++ b/version
@@ -1 +1 @@
-v1.8.7.dev1
+v1.8.7.post1.dev3
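
The new version string is a PEP 440 developmental release of a post-release: it sorts after the final 1.8.7 but before a finished 1.8.7.post1, whereas the old 1.8.7.dev1 sorted before 1.8.7. A quick check using the third-party "packaging" library (assumed available; the leading "v" in the version file is also accepted by PEP 440 parsers):

    from packaging.version import Version

    old = Version("1.8.7.dev1")
    new = Version("1.8.7.post1.dev3")

    assert new.release == (1, 8, 7)
    assert new.post == 1 and new.dev == 3            # post-release 1, dev iteration 3
    assert new.is_postrelease and new.is_devrelease
    # Ordering: a dev build of a post-release sits between the final release
    # and the finished post-release.
    assert old < Version("1.8.7") < new < Version("1.8.7.post1")
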