diff --git a/.circleci/main.yml b/.circleci/main.yml
index a13300a78d..4dfc06f738 100644
--- a/.circleci/main.yml
+++ b/.circleci/main.yml
@@ -45,10 +45,6 @@ commands:
- run:
name: "Configuring git user"
command: |
- sudo apt-key adv --recv-keys --keyserver keyserver.ubuntu.com 78BD65473CB3BD13
- curl -L https://packagecloud.io/circleci/trusty/gpgkey | sudo apt-key add -
- sudo apt-get update
- sudo apt-get install git -y
git config --global user.email "CMI_CPAC_Support@childmind.org"
git config --global user.name "Theodore (machine user) @ CircleCI"
create-docker-test-container:
@@ -64,11 +60,6 @@ commands:
mkdir -p ~/project/test-results
docker pull ${DOCKER_TAG}
docker run -v /etc/passwd:/etc/passwd --user=$(id -u):c-pac -dit -P -e COVERAGE_FILE=<< parameters.coverage-file >> -v /home/circleci/project/test-results:/code/test-results -v /home/circleci:/home/circleci -v /home/circleci/project/CPAC/resources/configs/test_configs:/test_configs -v $PWD:/code -v $PWD/dev/circleci_data:$PWD/dev/circleci_data --workdir=/home/circleci/project --entrypoint=/bin/bash --name docker_test ${DOCKER_TAG}
- get-sample-bids-data:
- steps:
- - run:
- name: Getting Sample BIDS Data
- command: git clone https://github.com/bids-standard/bids-examples.git
get-singularity:
parameters:
version:
@@ -231,7 +222,6 @@ jobs:
- set-up-variant:
variant: "<< parameters.variant >>"
- set-python-version
- - get-sample-bids-data
- run-pytest-docker
- store_test_results:
path: test-results
diff --git a/.github/Dockerfiles/AFNI.23.3.09-jammy.Dockerfile b/.github/Dockerfiles/AFNI.23.3.09-jammy.Dockerfile
index 654146ec78..86fa68315b 100644
--- a/.github/Dockerfiles/AFNI.23.3.09-jammy.Dockerfile
+++ b/.github/Dockerfiles/AFNI.23.3.09-jammy.Dockerfile
@@ -155,9 +155,9 @@ RUN apt-get clean \
&& rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/* /root/.cache/*
FROM scratch
-LABEL org.opencontainers.image.description "NOT INTENDED FOR USE OTHER THAN AS A STAGE IMAGE IN A MULTI-STAGE BUILD \
+LABEL org.opencontainers.image.description="NOT INTENDED FOR USE OTHER THAN AS A STAGE IMAGE IN A MULTI-STAGE BUILD \
AFNI ${AFNI_VERSION} (${VERSION_NAME}) stage"
-LABEL org.opencontainers.image.source https://github.com/FCP-INDI/C-PAC
+LABEL org.opencontainers.image.source=https://github.com/FCP-INDI/C-PAC
COPY --from=AFNI /lib/x86_64-linux-gnu/ld* /lib/x86_64-linux-gnu/
COPY --from=AFNI /lib/x86_64-linux-gnu/lib*so* /lib/x86_64-linux-gnu/
COPY --from=AFNI /lib64/ld* /lib64/
diff --git a/.github/Dockerfiles/ANTs.2.4.3-jammy.Dockerfile b/.github/Dockerfiles/ANTs.2.4.3-jammy.Dockerfile
index 03dd017b84..67cb8fdfad 100644
--- a/.github/Dockerfiles/ANTs.2.4.3-jammy.Dockerfile
+++ b/.github/Dockerfiles/ANTs.2.4.3-jammy.Dockerfile
@@ -30,8 +30,8 @@ RUN curl -sL https://github.com/ANTsX/ANTs/releases/download/v2.4.3/ants-2.4.3-u
# Only keep what we need
FROM scratch
-LABEL org.opencontainers.image.description "NOT INTENDED FOR USE OTHER THAN AS A STAGE IMAGE IN A MULTI-STAGE BUILD \
+LABEL org.opencontainers.image.description="NOT INTENDED FOR USE OTHER THAN AS A STAGE IMAGE IN A MULTI-STAGE BUILD \
ANTs 2.4.3 stage"
-LABEL org.opencontainers.image.source https://github.com/FCP-INDI/C-PAC
+LABEL org.opencontainers.image.source=https://github.com/FCP-INDI/C-PAC
COPY --from=ANTs /usr/lib/ants/ /usr/lib/ants/
COPY --from=ANTs /ants_template/ /ants_template/
diff --git a/.github/Dockerfiles/C-PAC.develop-jammy.Dockerfile b/.github/Dockerfiles/C-PAC.develop-jammy.Dockerfile
index 838d8dcc4b..1debc54c4a 100644
--- a/.github/Dockerfiles/C-PAC.develop-jammy.Dockerfile
+++ b/.github/Dockerfiles/C-PAC.develop-jammy.Dockerfile
@@ -15,8 +15,8 @@
# You should have received a copy of the GNU Lesser General Public
# License along with C-PAC. If not, see <https://www.gnu.org/licenses/>.
FROM ghcr.io/fcp-indi/c-pac/stage-base:standard-v1.8.8.dev1
-LABEL org.opencontainers.image.description "Full C-PAC image"
-LABEL org.opencontainers.image.source https://github.com/FCP-INDI/C-PAC
+LABEL org.opencontainers.image.description="Full C-PAC image"
+LABEL org.opencontainers.image.source=https://github.com/FCP-INDI/C-PAC
USER root
# install C-PAC
@@ -45,7 +45,8 @@ RUN rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/* /root/.cache/* \
&& chmod 777 $(ls / | grep -v sys | grep -v proc)
ENV PYTHONUSERBASE=/home/c-pac_user/.local
ENV PATH=$PATH:/home/c-pac_user/.local/bin \
- PYTHONPATH=$PYTHONPATH:$PYTHONUSERBASE/lib/python3.10/site-packages
+ PYTHONPATH=$PYTHONPATH:$PYTHONUSERBASE/lib/python3.10/site-packages \
+ _SHELL=/bin/bash
# set user
WORKDIR /home/c-pac_user
diff --git a/.github/Dockerfiles/C-PAC.develop-lite-jammy.Dockerfile b/.github/Dockerfiles/C-PAC.develop-lite-jammy.Dockerfile
index b58801b519..20561f09aa 100644
--- a/.github/Dockerfiles/C-PAC.develop-lite-jammy.Dockerfile
+++ b/.github/Dockerfiles/C-PAC.develop-lite-jammy.Dockerfile
@@ -15,8 +15,8 @@
# You should have received a copy of the GNU Lesser General Public
# License along with C-PAC. If not, see <https://www.gnu.org/licenses/>.
FROM ghcr.io/fcp-indi/c-pac/stage-base:lite-v1.8.8.dev1
-LABEL org.opencontainers.image.description "Full C-PAC image without FreeSurfer"
-LABEL org.opencontainers.image.source https://github.com/FCP-INDI/C-PAC
+LABEL org.opencontainers.image.description="Full C-PAC image without FreeSurfer"
+LABEL org.opencontainers.image.source=https://github.com/FCP-INDI/C-PAC
USER root
# install C-PAC
@@ -46,7 +46,8 @@ RUN rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/* /root/.cache/* \
&& chmod 777 $(ls / | grep -v sys | grep -v proc)
ENV PYTHONUSERBASE=/home/c-pac_user/.local
ENV PATH=$PATH:/home/c-pac_user/.local/bin \
- PYTHONPATH=$PYTHONPATH:$PYTHONUSERBASE/lib/python3.10/site-packages
+ PYTHONPATH=$PYTHONPATH:$PYTHONUSERBASE/lib/python3.10/site-packages \
+ _SHELL=/bin/bash
# set user
WORKDIR /home/c-pac_user
diff --git a/.github/Dockerfiles/FSL.6.0.6.5-jammy.Dockerfile b/.github/Dockerfiles/FSL.6.0.6.5-jammy.Dockerfile
index e4ff0f9b25..112b0feda1 100644
--- a/.github/Dockerfiles/FSL.6.0.6.5-jammy.Dockerfile
+++ b/.github/Dockerfiles/FSL.6.0.6.5-jammy.Dockerfile
@@ -101,9 +101,9 @@ ENTRYPOINT ["/bin/bash"]
# # Only keep what we need
FROM scratch
-LABEL org.opencontainers.image.description "NOT INTENDED FOR USE OTHER THAN AS A STAGE IMAGE IN A MULTI-STAGE BUILD \
+LABEL org.opencontainers.image.description="NOT INTENDED FOR USE OTHER THAN AS A STAGE IMAGE IN A MULTI-STAGE BUILD \
FSL 6.0.6.5 stage"
-LABEL org.opencontainers.image.source https://github.com/FCP-INDI/C-PAC
+LABEL org.opencontainers.image.source=https://github.com/FCP-INDI/C-PAC
COPY --from=FSL /lib/x86_64-linux-gnu /lib/x86_64-linux-gnu
COPY --from=FSL /usr/lib/x86_64-linux-gnu /usr/lib/x86_64-linux-gnu
COPY --from=FSL /usr/bin /usr/bin
diff --git a/.github/Dockerfiles/FSL.data.Dockerfile b/.github/Dockerfiles/FSL.data.Dockerfile
index c7e0b593e4..816b5e1547 100644
--- a/.github/Dockerfiles/FSL.data.Dockerfile
+++ b/.github/Dockerfiles/FSL.data.Dockerfile
@@ -18,9 +18,9 @@ RUN mkdir -p /fsl_data/atlases/HarvardOxford fsl_data/standard/tissuepriors \
&& chmod -R ugo+r /fsl_data/atlases
FROM scratch
-LABEL org.opencontainers.image.description "NOT INTENDED FOR USE OTHER THAN AS A STAGE IMAGE IN A MULTI-STAGE BUILD \
+LABEL org.opencontainers.image.description="NOT INTENDED FOR USE OTHER THAN AS A STAGE IMAGE IN A MULTI-STAGE BUILD \
FSL data"
-LABEL org.opencontainers.image.source https://github.com/FCP-INDI/C-PAC
+LABEL org.opencontainers.image.source=https://github.com/FCP-INDI/C-PAC
COPY --from=FSL /fsl_data/standard fsl_data/standard
COPY --from=FSL /fsl_data/atlases fsl_data/atlases
diff --git a/.github/Dockerfiles/FreeSurfer.6.0.0-min.neurodocker-jammy.Dockerfile b/.github/Dockerfiles/FreeSurfer.6.0.0-min.neurodocker-jammy.Dockerfile
index 811d20f617..ae6eac7548 100644
--- a/.github/Dockerfiles/FreeSurfer.6.0.0-min.neurodocker-jammy.Dockerfile
+++ b/.github/Dockerfiles/FreeSurfer.6.0.0-min.neurodocker-jammy.Dockerfile
@@ -32,7 +32,7 @@ RUN apt-get clean && \
rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/*
FROM scratch
-LABEL org.opencontainers.image.description "NOT INTENDED FOR USE OTHER THAN AS A STAGE IMAGE IN A MULTI-STAGE BUILD \
+LABEL org.opencontainers.image.description="NOT INTENDED FOR USE OTHER THAN AS A STAGE IMAGE IN A MULTI-STAGE BUILD \
FreeSurfer 6.0.0-min stage"
-LABEL org.opencontainers.image.source https://github.com/FCP-INDI/C-PAC
+LABEL org.opencontainers.image.source=https://github.com/FCP-INDI/C-PAC
COPY --from=FreeSurfer6 /usr/lib/freesurfer/ /usr/lib/freesurfer/
diff --git a/.github/Dockerfiles/ICA-AROMA.0.4.4-beta-jammy.Dockerfile b/.github/Dockerfiles/ICA-AROMA.0.4.4-beta-jammy.Dockerfile
index 2759c529eb..cc188c9aa2 100644
--- a/.github/Dockerfiles/ICA-AROMA.0.4.4-beta-jammy.Dockerfile
+++ b/.github/Dockerfiles/ICA-AROMA.0.4.4-beta-jammy.Dockerfile
@@ -24,6 +24,6 @@ USER c-pac_user
# Only keep what we need
FROM scratch
-LABEL org.opencontainers.image.description "NOT INTENDED FOR USE OTHER THAN AS A STAGE IMAGE IN A MULTI-STAGE BUILD \
+LABEL org.opencontainers.image.description="NOT INTENDED FOR USE OTHER THAN AS A STAGE IMAGE IN A MULTI-STAGE BUILD \
ICA-AROMA 0.4.4-beta stage"
COPY --from=ICA-AROMA /opt/ICA-AROMA/ /opt/ICA-AROMA/
diff --git a/.github/Dockerfiles/Ubuntu.jammy-non-free.Dockerfile b/.github/Dockerfiles/Ubuntu.jammy-non-free.Dockerfile
index 3017126770..5b8a653751 100644
--- a/.github/Dockerfiles/Ubuntu.jammy-non-free.Dockerfile
+++ b/.github/Dockerfiles/Ubuntu.jammy-non-free.Dockerfile
@@ -26,9 +26,9 @@ RUN apt-get update \
# use neurodebian runtime as parent image
FROM neurodebian:jammy-non-free
-LABEL org.opencontainers.image.description "NOT INTENDED FOR USE OTHER THAN AS A STAGE IMAGE IN A MULTI-STAGE BUILD \
+LABEL org.opencontainers.image.description="NOT INTENDED FOR USE OTHER THAN AS A STAGE IMAGE IN A MULTI-STAGE BUILD \
Ubuntu Jammy base image"
-LABEL org.opencontainers.image.source https://github.com/FCP-INDI/C-PAC
+LABEL org.opencontainers.image.source=https://github.com/FCP-INDI/C-PAC
ARG BIDS_VALIDATOR_VERSION=1.14.6 \
DEBIAN_FRONTEND=noninteractive
ENV TZ=America/New_York \
diff --git a/.github/Dockerfiles/base-lite.Dockerfile b/.github/Dockerfiles/base-lite.Dockerfile
index 25c494942f..e5c85d258a 100644
--- a/.github/Dockerfiles/base-lite.Dockerfile
+++ b/.github/Dockerfiles/base-lite.Dockerfile
@@ -22,9 +22,9 @@ FROM ghcr.io/fcp-indi/c-pac/fsl:6.0.6.5-jammy as FSL
FROM ghcr.io/fcp-indi/c-pac/ica-aroma:0.4.4-beta-jammy as ICA-AROMA
FROM ghcr.io/fcp-indi/c-pac/ubuntu:jammy-non-free
-LABEL org.opencontainers.image.description "NOT INTENDED FOR USE OTHER THAN AS A STAGE IMAGE IN A MULTI-STAGE BUILD \
+LABEL org.opencontainers.image.description="NOT INTENDED FOR USE OTHER THAN AS A STAGE IMAGE IN A MULTI-STAGE BUILD \
Standard software dependencies for C-PAC standard and lite images"
-LABEL org.opencontainers.image.source https://github.com/FCP-INDI/C-PAC
+LABEL org.opencontainers.image.source=https://github.com/FCP-INDI/C-PAC
USER root
# Installing connectome-workbench
diff --git a/.github/Dockerfiles/base-standard.Dockerfile b/.github/Dockerfiles/base-standard.Dockerfile
index de7d3841e2..0ba2cd5158 100644
--- a/.github/Dockerfiles/base-standard.Dockerfile
+++ b/.github/Dockerfiles/base-standard.Dockerfile
@@ -17,9 +17,9 @@
FROM ghcr.io/fcp-indi/c-pac/freesurfer:6.0.0-min.neurodocker-jammy as FreeSurfer
FROM ghcr.io/fcp-indi/c-pac/stage-base:lite-v1.8.8.dev1
-LABEL org.opencontainers.image.description "NOT INTENDED FOR USE OTHER THAN AS A STAGE IMAGE IN A MULTI-STAGE BUILD \
+LABEL org.opencontainers.image.description="NOT INTENDED FOR USE OTHER THAN AS A STAGE IMAGE IN A MULTI-STAGE BUILD \
Standard software dependencies for C-PAC standard images"
-LABEL org.opencontainers.image.source https://github.com/FCP-INDI/C-PAC
+LABEL org.opencontainers.image.source=https://github.com/FCP-INDI/C-PAC
USER root
# Installing FreeSurfer
diff --git a/.github/Dockerfiles/c3d.1.0.0-jammy.Dockerfile b/.github/Dockerfiles/c3d.1.0.0-jammy.Dockerfile
index 2c1a7f1d87..9fbcdd2386 100644
--- a/.github/Dockerfiles/c3d.1.0.0-jammy.Dockerfile
+++ b/.github/Dockerfiles/c3d.1.0.0-jammy.Dockerfile
@@ -36,7 +36,7 @@ RUN apt-get clean && \
rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/*
FROM scratch
-LABEL org.opencontainers.image.description "NOT INTENDED FOR USE OTHER THAN AS A STAGE IMAGE IN A MULTI-STAGE BUILD \
+LABEL org.opencontainers.image.description="NOT INTENDED FOR USE OTHER THAN AS A STAGE IMAGE IN A MULTI-STAGE BUILD \
c3d 1.0.0 (Jammy) stage"
-LABEL org.opencontainers.image.source https://github.com/FCP-INDI/C-PAC
+LABEL org.opencontainers.image.source=https://github.com/FCP-INDI/C-PAC
COPY --from=c3d /opt/c3d/ /opt/c3d/
diff --git a/.github/Dockerfiles/connectome-workbench.1.5.0.neurodebian-jammy.Dockerfile b/.github/Dockerfiles/connectome-workbench.1.5.0.neurodebian-jammy.Dockerfile
index 2c958fd5d5..1932efbc8f 100644
--- a/.github/Dockerfiles/connectome-workbench.1.5.0.neurodebian-jammy.Dockerfile
+++ b/.github/Dockerfiles/connectome-workbench.1.5.0.neurodebian-jammy.Dockerfile
@@ -24,9 +24,9 @@ RUN apt-get update \
USER c-pac_user
# FROM scratch
-# LABEL org.opencontainers.image.description "NOT INTENDED FOR USE OTHER THAN AS A STAGE IMAGE IN A MULTI-STAGE BUILD \
+# LABEL org.opencontainers.image.description="NOT INTENDED FOR USE OTHER THAN AS A STAGE IMAGE IN A MULTI-STAGE BUILD \
# connectome-workbench 1.5.0 stage"
-# LABEL org.opencontainers.image.source https://github.com/FCP-INDI/C-PAC
+# LABEL org.opencontainers.image.source=https://github.com/FCP-INDI/C-PAC
# COPY --from=base /lib/x86_64-linux-gnu/ld-linux-x86-64.so.2 /lib/x86_64-linux-gnu/ld-linux-x86-64.so.2
# COPY --from=base /lib/x86_64-linux-gnu/libGL.so.1 /lib/x86_64-linux-gnu/libGL.so.1
# COPY --from=base /lib/x86_64-linux-gnu/libGLU.so.1 /lib/x86_64-linux-gnu/libGLU.so.1
diff --git a/.github/Dockerfiles/neuroparc.1.0-human-bionic.Dockerfile b/.github/Dockerfiles/neuroparc.1.0-human-bionic.Dockerfile
index 2f64e0ae6f..519093d3bd 100644
--- a/.github/Dockerfiles/neuroparc.1.0-human-bionic.Dockerfile
+++ b/.github/Dockerfiles/neuroparc.1.0-human-bionic.Dockerfile
@@ -1,8 +1,8 @@
# using neurodebian runtime as parent image
FROM neurodebian:bionic-non-free
-LABEL org.opencontainers.image.description "NOT INTENDED FOR USE OTHER THAN AS A STAGE IMAGE IN A MULTI-STAGE BUILD \
+LABEL org.opencontainers.image.description="NOT INTENDED FOR USE OTHER THAN AS A STAGE IMAGE IN A MULTI-STAGE BUILD \
neuroparc v1.0-human stage"
-LABEL org.opencontainers.image.source https://github.com/FCP-INDI/C-PAC
+LABEL org.opencontainers.image.source=https://github.com/FCP-INDI/C-PAC
ARG DEBIAN_FRONTEND=noninteractive
diff --git a/.github/README/README.md b/.github/README/README.md
index 158a762313..19e0a337d9 100644
--- a/.github/README/README.md
+++ b/.github/README/README.md
@@ -46,7 +46,7 @@ flowchart LR
subgraph build_C-PAC.yml
bCPAC[[C-PAC]]
end
- subgraph build_and_test.yml
+ subgraph build_and_test.yaml
ubuntu[[Ubnutu]]-->stages[[stages]]-->build-base[[build-base]]-->build-base-standard[[build-base-standard]]
Circle_tests[[Circle_tests]]
@@ -65,7 +65,7 @@ flowchart LR
smoke-tests-participant[[smoke-tests-participant]]
end
- on_push.yml-->build_and_test.yml
+ on_push.yaml-->build_and_test.yaml
delete_images.yml
end
@@ -79,8 +79,8 @@ flowchart LR
Circle_tests-->CircleCI((Run tests on Circle CI))
- on_push.yml<-->get_pr_base_shas
- on_push.yml-->update_all_preconfigs
+ on_push.yaml<-->get_pr_base_shas
+ on_push.yaml-->update_all_preconfigs
cpacdockerfiles<-->C-PAC
@@ -94,7 +94,7 @@ flowchart LR
bCPAC<-->local_ghcr
stages<-->local_ghcr
- push>git push]-->on_push.yml
+ push>git push]-->on_push.yaml
smoke-tests-participant-->smoke_test_human
smoke-tests-participant-->smoke_test_nhp
diff --git a/.github/dependabot.yaml b/.github/dependabot.yaml
new file mode 100644
index 0000000000..f80c6026e8
--- /dev/null
+++ b/.github/dependabot.yaml
@@ -0,0 +1,20 @@
+version: 2
+updates:
+ - package-ecosystem: "github-actions"
+ directory: /
+ # Check for updates once a week
+ schedule:
+ interval: weekly
+ groups:
+ all-actions:
+ patterns: [ "*" ]
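+  # Group pip updates by dependency type (production vs. development) and open them against the develop branch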
+ - package-ecosystem: pip
+ directory: /
+ schedule:
+ interval: weekly
+ groups:
+ production dependencies:
+ dependency-type: production
+ development dependencies:
+ dependency-type: development
+ target-branch: develop
diff --git a/.github/workflows/build_C-PAC.yml b/.github/workflows/build_C-PAC.yml
index d126f6a778..fb103acdd7 100644
--- a/.github/workflows/build_C-PAC.yml
+++ b/.github/workflows/build_C-PAC.yml
@@ -13,20 +13,20 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Maximize build space
- uses: easimon/maximize-build-space@v6
+ uses: easimon/maximize-build-space@v10
with:
remove-dotnet: 'true'
remove-android: 'true'
remove-haskell: 'true'
overprovision-lvm: 'true'
- name: Check out C-PAC
- uses: actions/checkout@v3
+ uses: actions/checkout@v4
with:
fetch-depth: 2
- name: Set up Docker Buildx
- uses: docker/setup-buildx-action@v2.2.1
+ uses: docker/setup-buildx-action@v3.9.0
- name: Log in to GitHub Container Registry
- uses: docker/login-action@v2
+ uses: docker/login-action@v3
with:
registry: ghcr.io
username: ${{ github.actor }}
@@ -87,7 +87,7 @@ jobs:
echo $DOCKERFILE
cat $DOCKERFILE
- name: Build and push Docker image
- uses: docker/build-push-action@v4.0.0
+ uses: docker/build-push-action@v6.13.0
with:
context: .
file: ${{ env.DOCKERFILE }}
diff --git a/.github/workflows/build_and_test.yml b/.github/workflows/build_and_test.yaml
similarity index 91%
rename from .github/workflows/build_and_test.yml
rename to .github/workflows/build_and_test.yaml
index 6dadd8f9f9..0b49efdc28 100644
--- a/.github/workflows/build_and_test.yml
+++ b/.github/workflows/build_and_test.yaml
@@ -46,6 +46,10 @@ on:
description: 'third phase of staging images to rebuild (base images)'
type: string
required: true
+ test_mode:
+ description: 'lite or full?'
+ type: string
+ default: None
jobs:
Ubuntu:
@@ -58,7 +62,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Check out C-PAC
- uses: actions/checkout@v3
+ uses: actions/checkout@v4
with:
fetch-depth: 0
- name: Set tag & see if it exists
@@ -80,17 +84,17 @@ jobs:
sudo rm -rf "$AGENT_TOOLSDIRECTORY"
- name: Set up Docker Buildx
if: contains(fromJSON(env.REBUILD), matrix.Dockerfile) || steps.docker_tag.outputs.not_yet_exists == 1
- uses: docker/setup-buildx-action@v2.2.1
+ uses: docker/setup-buildx-action@v3.9.0
- name: Log in to GitHub Container Registry
if: contains(fromJSON(env.REBUILD), matrix.Dockerfile) || steps.docker_tag.outputs.not_yet_exists == 1
- uses: docker/login-action@v2
+ uses: docker/login-action@v3
with:
registry: ghcr.io
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Build and push Docker image
if: contains(fromJSON(env.REBUILD), matrix.Dockerfile) || steps.docker_tag.outputs.not_yet_exists == 1
- uses: docker/build-push-action@v4.0.0
+ uses: docker/build-push-action@v6.13.0
with:
file: .github/Dockerfiles/${{ matrix.Dockerfile }}.Dockerfile
push: true
@@ -110,7 +114,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Check out C-PAC
- uses: actions/checkout@v3
+ uses: actions/checkout@v4
with:
fetch-depth: 0
- name: Set tag & see if it exists
@@ -140,17 +144,17 @@ jobs:
sudo rm -rf "$AGENT_TOOLSDIRECTORY"
- name: Set up Docker Buildx
if: contains(fromJSON(env.REBUILD), matrix.Dockerfile) || steps.docker_tag.outputs.not_yet_exists == 1
- uses: docker/setup-buildx-action@v2.2.1
+ uses: docker/setup-buildx-action@v3.9.0
- name: Log in to GitHub Container Registry
if: contains(fromJSON(env.REBUILD), matrix.Dockerfile) || steps.docker_tag.outputs.not_yet_exists == 1
- uses: docker/login-action@v2
+ uses: docker/login-action@v3
with:
registry: ghcr.io
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Build and push Docker image
if: contains(fromJSON(env.REBUILD), matrix.Dockerfile) || steps.docker_tag.outputs.not_yet_exists == 1
- uses: docker/build-push-action@v4.0.0
+ uses: docker/build-push-action@v6.13.0
with:
context: .
file: .github/Dockerfiles/${{ matrix.Dockerfile }}.Dockerfile
@@ -172,21 +176,21 @@ jobs:
variant: ${{ fromJSON(inputs.phase_three) }}
steps:
- name: Maximize build space
- uses: easimon/maximize-build-space@v6
+ uses: easimon/maximize-build-space@v10
with:
remove-dotnet: 'true'
remove-android: 'true'
remove-haskell: 'true'
overprovision-lvm: 'true'
- name: Check out C-PAC
- uses: actions/checkout@v3
+ uses: actions/checkout@v4
with:
fetch-depth: 0
- name: Prep source files
run: |
sed -i -e 's/^/\.github\/Dockerfiles\//' .github/stage_requirements/${{ matrix.variant }}.txt
echo 'dev/docker_data/required_afni_pkgs.txt' >> .github/stage_requirements/${{ matrix.variant }}.txt
- echo '.github/workflows/build_and_test.yml' >> .github/stage_requirements/${{ matrix.variant }}.txt
+ echo '.github/workflows/build_and_test.yaml' >> .github/stage_requirements/${{ matrix.variant }}.txt
echo '.github/stage_requirements/${{ matrix.variant }}.txt' >> .github/stage_requirements/${{ matrix.variant }}.txt
- name: Set tag & see if it exists
continue-on-error: true
@@ -215,17 +219,17 @@ jobs:
sudo rm -rf "$AGENT_TOOLSDIRECTORY"
- name: Set up Docker Buildx
if: contains(fromJSON(env.REBUILD), matrix.variant) || steps.docker_tag.outputs.not_yet_exists == 1
- uses: docker/setup-buildx-action@v2.2.1
+ uses: docker/setup-buildx-action@v3.9.0
- name: Log in to GitHub Container Registry
if: contains(fromJSON(env.REBUILD), matrix.variant) || steps.docker_tag.outputs.not_yet_exists == 1
- uses: docker/login-action@v2
+ uses: docker/login-action@v3
with:
registry: ghcr.io
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Build and push base image
if: contains(fromJSON(env.REBUILD), matrix.variant) || steps.docker_tag.outputs.not_yet_exists == 1
- uses: docker/build-push-action@v4.0.0
+ uses: docker/build-push-action@v6.13.0
with:
context: .
file: .github/Dockerfiles/base-${{ matrix.variant }}.Dockerfile
@@ -244,21 +248,21 @@ jobs:
REBUILD: ${{ inputs.rebuild_phase_three }}
steps:
- name: Maximize build space
- uses: easimon/maximize-build-space@v6
+ uses: easimon/maximize-build-space@v10
with:
remove-dotnet: 'true'
remove-android: 'true'
remove-haskell: 'true'
overprovision-lvm: 'true'
- name: Check out C-PAC
- uses: actions/checkout@v3
+ uses: actions/checkout@v4
with:
fetch-depth: 0
- name: Prep source files
run: |
sed -i -e 's/^/\.github\/Dockerfiles\//' .github/stage_requirements/standard.txt
echo 'dev/docker_data/required_afni_pkgs.txt' >> .github/stage_requirements/standard.txt
- echo '.github/workflows/build_and_test.yml' >> .github/stage_requirements/standard.txt
+ echo '.github/workflows/build_and_test.yaml' >> .github/stage_requirements/standard.txt
echo '.github/stage_requirements/standard.txt' >> .github/stage_requirements/standard.txt
- name: Set tag & see if it exists
continue-on-error: true
@@ -287,17 +291,17 @@ jobs:
sudo rm -rf "$AGENT_TOOLSDIRECTORY"
- name: Set up Docker Buildx
if: contains(fromJSON(env.REBUILD), 'standard') || steps.docker_tag.outputs.not_yet_exists == 1
- uses: docker/setup-buildx-action@v2.2.1
+ uses: docker/setup-buildx-action@v3.9.0
- name: Log in to GitHub Container Registry
if: contains(fromJSON(env.REBUILD), 'standard') || steps.docker_tag.outputs.not_yet_exists == 1
- uses: docker/login-action@v2
+ uses: docker/login-action@v3
with:
registry: ghcr.io
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Build and push base image
if: contains(fromJSON(env.REBUILD), 'standard') || steps.docker_tag.outputs.not_yet_exists == 1
- uses: docker/build-push-action@v4.0.0
+ uses: docker/build-push-action@v6.13.0
with:
context: .
file: .github/Dockerfiles/base-standard.Dockerfile
@@ -327,19 +331,21 @@ jobs:
if: github.ref_name == 'develop' || github.ref_name == 'main'
uses: ./.github/workflows/smoke_test_participant.yml
- regtest-lite:
- name: Run lite regression test
+ check_test_mode:
+ name: check_test_mode
+ runs-on: ubuntu-latest
+ steps:
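+      # log the selected test_mode in the workflow run for easier debugging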
+ - run: echo ${{ inputs.test_mode }}
+
+ regtest:
+ name: Run regression and integration test
needs:
- C-PAC
secrets: inherit
- if: contains(github.event.head_commit.message, '[run reg-suite]')
- uses: ./.github/workflows/regression_test_lite.yml
-
- regtest-full:
- name: Run full regression test
- needs:
- - smoke-tests-participant
- uses: ./.github/workflows/regression_test_full.yml
+ if: inputs.test_mode == 'lite'
+ uses: ./.github/workflows/regtest.yaml
+ with:
+ test_mode: ${{ inputs.test_mode }}
Circle_tests:
name: Run tests on CircleCI
@@ -350,7 +356,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Check out C-PAC
- uses: actions/checkout@v3
+ uses: actions/checkout@v4
with:
fetch-depth: 2
- name: Trigger CircleCI tests
diff --git a/.github/workflows/delete_images.yml b/.github/workflows/delete_images.yml
index 91ed5e98df..ce259fd811 100644
--- a/.github/workflows/delete_images.yml
+++ b/.github/workflows/delete_images.yml
@@ -18,7 +18,7 @@ jobs:
IMAGE: c-pac
steps:
- name: Check out C-PAC
- uses: actions/checkout@v3
+ uses: actions/checkout@v4
- name: 'Delete branch image'
run: |
OWNER=$(echo ${GITHUB_REPOSITORY} | cut -d '/' -f 1)
@@ -41,7 +41,7 @@ jobs:
-X DELETE \
https://api.github.com/${OWNER_TYPE}/${OWNER}/packages/container/c-pac/versions/${VERSION_ID}
- name: Delete all containers from repository without tags
- uses: Chizkiyahu/delete-untagged-ghcr-action@v2
+ uses: Chizkiyahu/delete-untagged-ghcr-action@v6
with:
token: ${GITHUB_TOKEN}
repository_owner: ${{ github.repository_owner }}
diff --git a/.github/workflows/deploy_to_Docker_Hub.yml b/.github/workflows/deploy_to_Docker_Hub.yml
index a9aaec8fab..a8ee3e6547 100644
--- a/.github/workflows/deploy_to_Docker_Hub.yml
+++ b/.github/workflows/deploy_to_Docker_Hub.yml
@@ -18,7 +18,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Log in to Docker Hub
- uses: docker/login-action@v2
+ uses: docker/login-action@v3
with:
username: ${{ secrets.DOCKERHUB_USER }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
diff --git a/.github/workflows/on_push.yml b/.github/workflows/on_push.yaml
similarity index 79%
rename from .github/workflows/on_push.yml
rename to .github/workflows/on_push.yaml
index 60f6354dc5..58f50b6d10 100644
--- a/.github/workflows/on_push.yml
+++ b/.github/workflows/on_push.yaml
@@ -32,16 +32,9 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Check out C-PAC
- uses: actions/checkout@v3
+ uses: actions/checkout@v4
with:
fetch-depth: 2
- - name: Get changed files since last commit
- uses: tj-actions/changed-files@v41.0.0
- id: changed-files
- with:
- since_last_remote_commit: "true"
- files: .github/Dockerfiles/*
- json: "true"
- name: Determine stages to rebuild
env:
MESSAGE: ${{ github.event.head_commit.message }}
@@ -49,14 +42,12 @@ jobs:
run: |
# initialize phase arrays
declare -a PHASE_ONE PHASE_TWO PHASE_THREE REBUILD_PHASE_ONE REBUILD_PHASE_TWO REBUILD_PHASE_THREE
- # turn JSON array into BASH array
- CHANGED_FILES=( $(echo ${{ steps.changed-files.outputs.all_changed_files }} | sed -e 's/\[//g' -e 's/\]//g' -e 's/\,/ /g') )
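+          # rebuild decisions are now driven solely by [rebuild STAGE] tags in the commit message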
# loop through stages to maybe rebuild
for STAGE in $(cat ${GITHUB_WORKSPACE}/.github/stage_requirements/phase_one.txt)
do
PHASE_ONE+=($STAGE)
# check commit message for [rebuild STAGE] or if STAGE has changed
- if [[ "${MESSAGE}" == *"[rebuild ${STAGE}]"* ]] || [[ " ${CHANGED_FILES[*]} " =~ " ${STAGE} " ]]
+ if [[ "${MESSAGE}" == *"[rebuild ${STAGE}]"* ]]
then
REBUILD_PHASE_ONE+=($STAGE)
fi
@@ -64,7 +55,7 @@ jobs:
for STAGE in $(cat ${GITHUB_WORKSPACE}/.github/stage_requirements/phase_two.txt)
do
PHASE_TWO+=($STAGE)
- if [[ "${MESSAGE}" == *"[rebuild ${STAGE}]"* ]] || [[ " ${CHANGED_FILES[*]} " =~ " ${STAGE} " ]]
+ if [[ "${MESSAGE}" == *"[rebuild ${STAGE}]"* ]]
then
REBUILD_PHASE_TWO+=($STAGE)
fi
@@ -72,14 +63,14 @@ jobs:
for STAGE in $(cat ${GITHUB_WORKSPACE}/.github/stage_requirements/phase_three.txt)
do
PHASE_THREE+=($STAGE)
- if [[ "${MESSAGE}" == *"[rebuild ${STAGE}]"* ]] || [[ "${MESSAGE}" == *"[rebuild base-${STAGE}]"* ]] || [[ " ${CHANGED_FILES[*]} " =~ " ${STAGE} " ]]
+ if [[ "${MESSAGE}" == *"[rebuild ${STAGE}]"* ]] || [[ "${MESSAGE}" == *"[rebuild base-${STAGE}]"* ]]
then
REBUILD_PHASE_THREE+=($STAGE)
fi
done
# add base stages based on their dependencies
BASES=("${PHASE_THREE[@]}" standard)
- if [[ "${MESSAGE}" == *"[rebuild standard]"* ]] || [[ "${MESSAGE}" == *"[rebuild base-standard]"* ]] || [[ " ${CHANGED_FILES[*]} " =~ " standard " ]]
+ if [[ "${MESSAGE}" == *"[rebuild standard]"* ]] || [[ "${MESSAGE}" == *"[rebuild base-standard]"* ]]
then
REBUILD_PHASE_THREE+=(standard)
fi
@@ -107,10 +98,36 @@ jobs:
echo "phase_three=${phase_three}" >> $GITHUB_OUTPUT
echo "rebuild_phase_three=${rebuild_phase_three}" >> $GITHUB_OUTPUT
+ check_pr:
+ runs-on: ubuntu-latest
+ outputs:
+ test_mode: ${{ steps.check_pr.outputs.test_mode }}
+ steps:
+ - name: Check out C-PAC
+ uses: actions/checkout@v3
+ with:
+ fetch-depth: 2
+ - name: Check if commit is in a PR to develop
+ id: check_pr
+ run: |
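+          # Default to no regression testing; use 'lite' when the commit message contains [run reg-suite lite] or an open or draft PR targets develop, and 'full' when one targets main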
+ TEST_MODE=none
+ if echo "${{ github.event.head_commit.message }}" | grep -q '\[run reg-suite lite\]'
+ then
+ TEST_MODE=lite
+ elif gh pr list --base develop --json number,state,draft | jq 'any(.[]; .state == "OPEN" or .draft == true)'; then
+ TEST_MODE=lite
+ elif gh pr list --base main --json number,state,draft | jq 'any(.[]; .state == "OPEN" or .draft == true)'; then
+ TEST_MODE=full
+ fi
+ echo "test_mode=${TEST_MODE}"
+ echo "test_mode=${TEST_MODE}" >> $GITHUB_OUTPUT
+
build-stages:
name: Build multistage image stages
- needs: check-updated-preconfigs
- uses: ./.github/workflows/build_and_test.yml
+ needs:
+ - check_pr
+ - check-updated-preconfigs
+ uses: ./.github/workflows/build_and_test.yaml
secrets: inherit
with:
phase_one: ${{ needs.check-updated-preconfigs.outputs.phase_one }}
@@ -119,3 +136,4 @@ jobs:
rebuild_phase_two: ${{ needs.check-updated-preconfigs.outputs.rebuild_phase_two }}
phase_three: ${{ needs.check-updated-preconfigs.outputs.phase_three }}
rebuild_phase_three: ${{ needs.check-updated-preconfigs.outputs.rebuild_phase_three }}
+ test_mode: ${{ needs.check_pr.outputs.test_mode }}
diff --git a/.github/workflows/regression_test_full.yml b/.github/workflows/regression_test_full.yml
deleted file mode 100644
index 6dba2d1bf2..0000000000
--- a/.github/workflows/regression_test_full.yml
+++ /dev/null
@@ -1,41 +0,0 @@
-name: Run Regression Full Test
-
-on:
- workflow_call:
-
-jobs:
- test:
- name: Regression Test - Full
- runs-on: ubuntu-latest
- steps:
- - name: Get C-PAC branch
- run: |
- GITHUB_BRANCH=$(echo ${GITHUB_REF} | cut -d '/' -f 3-)
- if [[ ! $GITHUB_BRANCH == 'main' ]] && [[ ! $GITHUB_BRANCH == 'develop' ]]
- then
- TAG=${GITHUB_BRANCH//\//_}
- elif [[ $GITHUB_BRANCH == 'develop' ]]
- then
- TAG=nightly
- elif [[ $GITHUB_BRANCH == 'main' ]]
- then
- TAG=latest
- fi
-
- - name: Checkout Code
- uses: actions/checkout@v2
- - name: Clone reg-suite
- run: |
- git clone https://github.com/amygutierrez/reg-suite.git
-
- - name: Run Full Regression Test
- if: ${{ github.event_name }} == "pull_request" && ${{ github.event.pull_request.state }} == "closed" && ${{ github.event.pull_request.merged }} == "true" && ${{ github.event.pull_request.base.ref }} == "main"
- run: |
- echo "Running full regression test"
- echo "୧(๑•̀ヮ•́)૭ LET'S GO! ٩(^ᗜ^ )و "
-
- - uses: actions/upload-artifact@v3
- if: always()
- with:
- name: logs
- path: output/*/*/log/
diff --git a/.github/workflows/regression_test_lite.yml b/.github/workflows/regression_test_lite.yml
deleted file mode 100644
index 4e6b5a46f6..0000000000
--- a/.github/workflows/regression_test_lite.yml
+++ /dev/null
@@ -1,88 +0,0 @@
-name: Launch lite regression test
-
-on:
- pull_request:
- branches:
- - develop
- types:
- - opened
- - ready_for_review
- - reopened
- workflow_call:
- secrets:
- GH_CLI_BIN_PATH:
- description: 'path to directory containing GitHub CLI binary if not on default $PATH'
- required: false
- SSH_PRIVATE_KEY:
- required: true
- SSH_USER:
- required: true
- SSH_HOST:
- required: true
- SSH_WORK_DIR:
- required: true
- workflow_dispatch:
-
-jobs:
- test:
- name: Regression Test - Lite
- environment: ACCESS
- env:
- SSH_PRIVATE_KEY: ${{ secrets.SSH_PRIVATE_KEY }}
- SSH_HOST: ${{ secrets.SSH_HOST }}
- if: "${{ github.env.SSH_PRIVATE_KEY }} != ''"
- runs-on: ubuntu-latest
- steps:
- - name: Get C-PAC branch
- run: |
- if [[ ! $GITHUB_REF_NAME == 'main' ]] && [[ ! $GITHUB_REF_NAME == 'develop' ]]
- then
- TAG=${GITHUB_REF_NAME//\//_}
- elif [[ $GITHUB_REF_NAME == 'develop' ]]
- then
- TAG=nightly
- elif [[ $GITHUB_REF_NAME == 'main' ]]
- then
- TAG=latest
- fi
- TAG=$TAG$VARIANT
- echo DOCKER_TAG=$(echo "ghcr.io/${{ github.repository }}" | tr '[:upper:]' '[:lower:]'):$TAG >> $GITHUB_ENV
- cat $GITHUB_ENV
-
- - name: Install SSH Keys
- run: |
- mkdir -p ~/.ssh/
- echo "${{ env.SSH_PRIVATE_KEY }}" > ~/.ssh/id_rsa
- chmod 600 ~/.ssh/id_rsa
- ssh-keyscan -H -t rsa "${{ env.SSH_HOST }}" > ~/.ssh/known_hosts
-
- - name: Initiate check
- uses: guibranco/github-status-action-v2@v1.1.7
- with:
- authToken: ${{ secrets.GITHUB_TOKEN }}
- context: Launch lite regression test
- description: launching
- state: pending
-
- - name: Connect and Run Regression Test Lite
- uses: appleboy/ssh-action@v1.0.0
- with:
- host: ${{ secrets.SSH_HOST }}
- username: ${{ secrets.SSH_USER }}
- key: ${{ secrets.SSH_PRIVATE_KEY }}
- command_timeout: 200m
- script: |
- cd ${{ secrets.SSH_WORK_DIR }}
- if [ ! -d slurm_testing ] ; then
- git clone https://github.com/${{ github.repository_owner }}/slurm_testing slurm_testing
- else
- cd slurm_testing
- git pull origin regression/after_runs
- cd ..
- fi
- mkdir -p ./logs/${{ github.sha }}
- sbatch --export="HOME_DIR=${{ secrets.SSH_WORK_DIR }},IMAGE=${{ env.DOCKER_TAG }},OWNER=${{ github.repository_owner }},PATH_EXTRA=${{ secrets.GH_CLI_BIN_PATH }},REPO=$(echo ${{ github.repository }} | cut -d '/' -f 2),SHA=${{ github.sha }}" --output=${{ secrets.SSH_WORK_DIR }}/logs/${{ github.sha }}/out.log --error=${{ secrets.SSH_WORK_DIR }}/logs/${{ github.sha }}/error.log slurm_testing/.github/scripts/launch_regtest_lite.SLURM
-
- - name: Cleanup SSH
- run: |
- rm -rf ~/.ssh
diff --git a/.github/workflows/regtest.yaml b/.github/workflows/regtest.yaml
new file mode 100644
index 0000000000..04c6b14d15
--- /dev/null
+++ b/.github/workflows/regtest.yaml
@@ -0,0 +1,116 @@
+name: Launch regression test
+
+on:
+ pull_request:
+ branches:
+ - develop
+ types:
+ - opened
+ - ready_for_review
+ - reopened
+ workflow_call:
+ inputs:
+ test_mode:
+ type: string
+ required: true
+ secrets:
+ GH_CLI_BIN_PATH:
+ description: 'path to directory containing GitHub CLI binary if not on default $PATH'
+ required: false
+ SSH_PRIVATE_KEY:
+ required: true
+ SSH_USER:
+ required: true
+ SSH_HOST:
+ required: true
+ SSH_WORK_DIR:
+ required: true
+ workflow_dispatch:
+ inputs:
+ test_mode:
+ type: string
+ required: true
+
+jobs:
+ test:
+ name: Regression Test - ${{ inputs.test_mode }}
+ environment: ACCESS
+ env:
+ COMPARISON_PATH: ${{ secrets.COMPARISON_PATH }}
+ DASHBOARD_REPO: ${{ vars.DASHBOARD_REPO}}
+ DOCKER_TAG:
+ GH_CLI_BIN_PATH: ${{ secrets.GH_CLI_BIN_PATH }}
+ SLURM_TESTING_BRANCH: ${{ vars.SLURM_TESTING_BRANCH }}
+ SLURM_TESTING_PACKAGE: ${{ vars.SLURM_TESTING_PACKAGE }}
+ SLURM_TESTING_REPO: ${{ vars.SLURM_TESTING_REPO }}
+ SSH_PRIVATE_KEY: ${{ secrets.SSH_PRIVATE_KEY }}
+ SSH_HOST: ${{ secrets.SSH_HOST }}
+ SSH_USER: ${{ secrets.SSH_USER }}
+ SSH_WORK_DIR: ${{ secrets.SSH_WORK_DIR }}
+ TOKEN_FILE: ${{ secrets.TOKEN_FILE }}
+ if: |
+ ${{ github.env.SSH_PRIVATE_KEY != '' &&
+ (github.event_name == 'workflow_dispatch' ||
+ (github.event_name == 'pull_request' &&
+ github.event.repository.fork == 'false')) }}
+ runs-on: ubuntu-latest
+ steps:
+ - name: Get C-PAC branch
+ run: |
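+          # Map the branch to a Docker tag: develop -> nightly, main -> latest, other branches -> the branch name with '/' replaced by '_'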
+ if [[ ! $GITHUB_REF_NAME == 'main' ]] && [[ ! $GITHUB_REF_NAME == 'develop' ]]
+ then
+ TAG=${GITHUB_REF_NAME//\//_}
+ elif [[ $GITHUB_REF_NAME == 'develop' ]]
+ then
+ TAG=nightly
+ elif [[ $GITHUB_REF_NAME == 'main' ]]
+ then
+ TAG=latest
+ fi
+ TAG=$TAG$VARIANT
+ echo DOCKER_TAG=$(echo "ghcr.io/${{ github.repository }}" | tr '[:upper:]' '[:lower:]'):$TAG >> $GITHUB_ENV
+ cat $GITHUB_ENV
+
+ - name: Install SSH Keys
+ run: |
+ mkdir -p ~/.ssh/
+ echo "${{ env.SSH_PRIVATE_KEY }}" > ~/.ssh/id_rsa
+ chmod 600 ~/.ssh/id_rsa
+ ssh-keyscan -H -t rsa "${{ env.SSH_HOST }}" > ~/.ssh/known_hosts
+
+ - name: Connect and Run Regression Test ${{ inputs.test_mode }}
+ uses: appleboy/ssh-action@v1.0.0
+ with:
+ host: ${{ env.SSH_HOST }}
+ username: ${{ env.SSH_USER }}
+ key: ${{ env.SSH_PRIVATE_KEY }}
+ command_timeout: 200m
+ script: |
+ set -x
+ cd ${{ env.SSH_WORK_DIR }}
+ if pip show "${{ env.SLURM_TESTING_PACKAGE }}" > /dev/null 2>&1; then
+ # If the package is installed, upgrade it
+ python3 -m pip install --user --upgrade --force-reinstall "https://github.com/${{ env.SLURM_TESTING_REPO }}/archive/${{ env.SLURM_TESTING_BRANCH }}.zip"
+ else
+ # If the package is not installed, install it
+ python3 -m pip install --user "https://github.com/${{ env.SLURM_TESTING_REPO }}/archive/${{ env.SLURM_TESTING_BRANCH }}.zip"
+ fi
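+            # Create a per-commit working directory and hand the regression test launch off to SLURM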
+ _CPAC_SLURM_TESTING_WD="${{ env.SSH_WORK_DIR }}/automatic_tests/${{ inputs.test_mode }}/${{ github.sha }}"
+ mkdir -p "${_CPAC_SLURM_TESTING_WD}"
+ sbatch cpac-slurm-status ${{ inputs.test_mode }} launch \
+ --wd="${_CPAC_SLURM_TESTING_WD}" \
+ --comparison-path="${{ env.COMPARISON_PATH }}" \
+ --dashboard-repo="${{ env.DASHBOARD_REPO}}" \
+ --home-dir="${{ env.SSH_WORK_DIR }}" \
+ --image="${{ env.DOCKER_TAG }}" \
+ --owner="${{ github.repository_owner }}" \
+ --path-extra="${{ env.GH_CLI_BIN_PATH }}" \
+ --repo="${{ github.repository }}" \
+ --sha="${{ github.sha }}" \
+ --slurm-testing-branch="${{ env.SLURM_TESTING_BRANCH }}" \
+ --slurm-testing-repo="${{ env.SLURM_TESTING_REPO }}" \
+ --token-file="${{ env.TOKEN_FILE }}"
+
+ - name: Cleanup SSH
+ run: |
+ rm -rf ~/.ssh
diff --git a/.github/workflows/smoke_test_participant.yml b/.github/workflows/smoke_test_participant.yml
index 3fde0de8aa..e41292535f 100644
--- a/.github/workflows/smoke_test_participant.yml
+++ b/.github/workflows/smoke_test_participant.yml
@@ -104,7 +104,7 @@ jobs:
--participant_label ${{ matrix.participant }} \
--preconfig ${{ matrix.preconfig }} \
--n_cpus 2
- - uses: actions/upload-artifact@v3
+ - uses: actions/upload-artifact@v4
if: ${{ always() }}
with:
name: expectedOutputs human ${{ matrix.preconfig }} ${{ matrix.variant }} ${{ matrix.participant }}
@@ -144,14 +144,14 @@ jobs:
TAG=$TAG$VARIANT
echo DOCKER_TAG=$(echo "ghcr.io/${{ github.repository }}" | tr '[:upper:]' '[:lower:]'):$TAG >> $GITHUB_ENV
cat $GITHUB_ENV
+ - name: setup-conda
+ uses: conda-incubator/setup-miniconda@v3.1.1
- name: Set up datalad-OSF
run: |
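+          # Install git-annex from apt and DataLad from conda-forge (replaces the previous NeuroDebian-based setup)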
+ sudo apt-get update && sudo apt-get install -y git-annex
git config --global user.email "CMI_CPAC_Support@childmind.org"
git config --global user.name "Theodore (Machine User)"
- wget -O- http://neuro.debian.net/lists/jammy.us-tn.libre | sudo tee /etc/apt/sources.list.d/neurodebian.sources.list
- sudo apt-key adv --recv-keys --keyserver hkps://keyserver.ubuntu.com 0xA5D32F012649A5A9
- sudo apt-get update
- sudo apt-get install datalad git-annex-standalone
+ yes | conda install -c conda-forge datalad
pip install datalad-osf
- name: Get NHP test data
run: |
@@ -168,7 +168,7 @@ jobs:
--preconfig ${{ matrix.preconfig }} \
--participant_label ${{ matrix.participant }} \
--n_cpus 2
- - uses: actions/upload-artifact@v3
+ - uses: actions/upload-artifact@v4
if: ${{ always() }}
with:
name: expectedOutputs nhp ${{ matrix.preconfig }} ${{ matrix.variant }} ${{ matrix.participant }}
@@ -203,17 +203,24 @@ jobs:
TAG=$TAG$VARIANT
echo DOCKER_TAG=$(echo "ghcr.io/${{ github.repository }}" | tr '[:upper:]' '[:lower:]'):$TAG >> $GITHUB_ENV
cat $GITHUB_ENV
+ - name: setup-conda
+ uses: conda-incubator/setup-miniconda@v3.1.1
+ with:
+ activate-environment: datalad-osf
+ channels: conda-forge
+ conda-remove-defaults: "true"
+ python-version: 3.12
- name: Set up datalad-OSF
run: |
+ sudo apt-get update && sudo apt-get install -y git-annex
git config --global user.email "CMI_CPAC_Support@childmind.org"
git config --global user.name "Theodore (Machine User)"
- wget -O- http://neuro.debian.net/lists/jammy.us-tn.libre | sudo tee /etc/apt/sources.list.d/neurodebian.sources.list
- sudo apt-key adv --recv-keys --keyserver hkps://keyserver.ubuntu.com 0xA5D32F012649A5A9
- sudo apt-get update
- sudo apt-get install datalad git-annex-standalone
+ yes | conda install -c conda-forge datalad
pip install datalad-osf
- name: Get rodent test data
run: |
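+          # Enable verbose git and DataLad logging for the OSF clone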
+ export GIT_TRACE=1
+ export DATALAD_LOG_LEVEL=DEBUG
datalad clone osf://uya3r test-data
- name: Run rodent smoke test
run: |
@@ -226,7 +233,7 @@ jobs:
/test-data /outputs test_config \
--preconfig rodent \
--n_cpus 2
- - uses: actions/upload-artifact@v3
+ - uses: actions/upload-artifact@v4
if: ${{ always() }}
with:
name: expectedOutputs rodent ${{ matrix.variant }}
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 957e36b029..66b0a5da0e 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -14,6 +14,9 @@
# You should have received a copy of the GNU Lesser General Public
# License along with C-PAC. If not, see <https://www.gnu.org/licenses/>.
+ci:
+ skip: [ruff, update-yaml-comments]
+
fail_fast: false
repos:
diff --git a/CHANGELOG.md b/CHANGELOG.md
index df8f40a666..078114399e 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -19,15 +19,43 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
### Added
- `pyproject.toml` file with `[build-system]` defined.
+- [pre-commit.ci status](https://results.pre-commit.ci/latest/github/FCP-INDI/C-PAC/main) badge to [`README`](./README.md).
+- `desired_orientation` key in participant-level pipeline config under `pipeline_setup`.
+- Required positional parameter "wf" in input and output of `ingress_pipeconfig_paths` function, where a node to reorient templates is added to the `wf`.
+- Required positional parameter "orientation" to `resolve_resolution`.
+- Optional positional argument "cfg" to `create_lesion_preproc`.
+- Allow enabling `overwrite_transform` only when the registration method is `ANTS`.
+- `resource_inventory` utility to inventory NodeBlock function inputs and outputs.
+- New switch `mask_sbref` under `func_input_prep` in functional registration, set to `on` by default.
+- New resource `desc-head_bold` as non skull-stripped bold from nodeblock `bold_masking`.
+- `censor_file_path` from `offending_timepoints_connector` in the `build_nuisance_regressor` node.
### Changed
- Moved `pygraphviz` from requirements to `graphviz` optional dependencies group.
+- Automatically tag untagged `subject_id` and `unique_id` as `!!str` when loading data config files.
+- Made orientation configurable (was hard-coded as "RPI").
+- Resource-not-found errors now include information about where to source those resources.
+- Moved `ref_mask_res_2` and `T1w_template_res-2` fields from registration into surface under `abcd_prefreesurfer_prep`.
+- Moved `find_censors node` inside `create_nuisance_regression_workflow` into its own function/subworkflow as `offending_timepoints_connector`.
+- [FSL-AFNI subworkflow](https://github.com/FCP-INDI/C-PAC/blob/4bdd6c410ef0a9b90f53100ea005af1f7d6e76c0/CPAC/func_preproc/func_preproc.py#L1052C4-L1231C25)
+ - Moved `FSL-AFNI subworkflow` from inside a `bold_mask_fsl_afni` nodeblock into a separate function.
+ - Renamed `desc-ref_bold` created in this workflow to `desc-unifized_bold`.
+ - `coregistration_prep_fmriprep` nodeblock now checks if `desc-unifized_bold` exists in the Resource Pool, if not it runs the `FSL-AFNI subworkflow` to create it.
+- Input `desc-brain_bold` to `desc-preproc_bold` for `sbref` generation nodeblock `coregistration_prep_vol`.
+- Turned `generate_xcpqc_files` on for all preconfigurations except `blank`.
+- Introduced specific switch `restore_t1w_intensity` for `correct_restore_brain_intensity_abcd` nodeblock, enabling it by default only in `abcd-options` pre-config.
+- Updated GitHub Actions to run automated integration and regression tests on HPC.
### Fixed
- A bug in which AWS S3 encryption was looked for in Nipype config instead of pipeline config (only affected uploading logs).
- Restored `bids-validator` functionality.
+- Fixed empty `shell` variable in cluster run scripts.
+- A bug in which bandpass filters always assumed 1D regressor files have exactly 5 header rows.
+- Removed an erroneous connection to AFNI 3dTProject in nuisance denoising that would unnecessarily send a spike regressor as a censor. This would sometimes cause TRs to unnecessarily be dropped from the timeseries as if scrubbing were being performed.
+- Lingering calls to `cpac_outputs.csv` (was changed to `cpac_outputs.tsv` in v1.8.1).
+- A bug in the `freesurfer_abcd_preproc` nodeblock where the `Template` image was incorrectly used as `reference` during the `inverse_warp` step. Replacing it with the subject-specific `T1w` image resolved the issue of the `desc-restoreBrain_T1w` being chipped off.
### Removed
@@ -35,6 +63,18 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
- `ABCD-HCP`
- `fMRIPrep-LTS`
- Typehinting support for Python < 3.10.
+- Extra outputs listed in `freesurfer_abcd_preproc`.
+- Resource `space-template_desc-T1w_mask`
+ - as output from FNIRT registration.
+  - as inputs from Nodeblocks requesting it, replaced with `space-template_desc-brain_mask`.
+ - from outputs tsv.
+- Inputs `[desc-motion_bold, bold]` from `coregistration_prep_vol` nodeblock.
+- `input` field from `coregistration` in blank and default config.
+- `reg_with_skull` switch from `func_input_prep` in blank and default config.
+
+#### Removed CI dependency
+
+- `tj-actions/changed-files` ([CVE-2023-51664](https://www.stepsecurity.io/blog/harden-runner-detection-tj-actions-changed-files-action-is-compromised))
## [1.8.7] - 2024-05-03
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index 24b37bcd47..2f54c2a947 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -80,3 +80,4 @@ We have 3 types of staging Dockerfiles: operating system, software dependency, a
* To change a dependency in a C-PAC image, update the stage images at the top of the relevant `.github/Dockerfiles/C-PAC.develop-*.Dockerfile`.
* If a Dockerfile does not yet exist for the added dependency, create a Dockerfile for the new dependency and add the filename (without extension) to [`jobs.stages.strategy.matrix.Dockerfile` in `.github/workflows/build_stages.yml`](https://github.com/FCP-INDI/C-PAC/blob/4e18916384e52c3dc9610aea3eed537c19d480e3/.github/workflows/build_stages.yml#L77-L97)
* If no Dockerfiles use the removed dependency, remove the Dockerfile for the dependency and remove the filename from [`jobs.stages.strategy.matrix.Dockerfile` in `.github/workflows/build_stages.yml`](https://github.com/FCP-INDI/C-PAC/blob/4e18916384e52c3dc9610aea3eed537c19d480e3/.github/workflows/build_stages.yml#L77-L97)
+* When making changes to a Dockerfile, include the line `[rebuild {filename}]` in your commit message, where `filename` is the name of the Dockerfile without the extension (e.g., `[rebuild Ubuntu.jammy-non-free]`).
diff --git a/CPAC/__main__.py b/CPAC/__main__.py
index 90eb435b23..0b088c67f2 100644
--- a/CPAC/__main__.py
+++ b/CPAC/__main__.py
@@ -15,7 +15,6 @@
# You should have received a copy of the GNU Lesser General Public
# License along with C-PAC. If not, see <https://www.gnu.org/licenses/>.
-from logging import basicConfig, INFO
import os
import click
@@ -26,7 +25,6 @@
from CPAC.utils.monitoring.custom_logging import getLogger
logger = getLogger("CPAC")
-basicConfig(format="%(message)s", level=INFO)
# CLI tree
#
diff --git a/CPAC/_entrypoints/run.py b/CPAC/_entrypoints/run.py
index ffeb1c8352..f84b6cf799 100755
--- a/CPAC/_entrypoints/run.py
+++ b/CPAC/_entrypoints/run.py
@@ -454,6 +454,14 @@ def run_main():
action="store_true",
)
+ parser.add_argument(
+ "--freesurfer_dir",
+ "--freesurfer-dir",
+ help="Specify path to pre-computed FreeSurfer outputs "
+ "to pull into C-PAC run",
+ default=False,
+ )
+
# get the command line arguments
args = parser.parse_args(
sys.argv[1 : (sys.argv.index("--") if "--" in sys.argv else len(sys.argv))]
@@ -743,6 +751,9 @@ def run_main():
c["pipeline_setup", "system_config", "num_participants_at_once"],
)
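+    # Pass a CLI-specified FreeSurfer directory through to the pipeline configuration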
+ if args.freesurfer_dir:
+ c["pipeline_setup"]["freesurfer_dir"] = args.freesurfer_dir
+
if not args.data_config_file:
WFLOGGER.info("Input directory: %s", bids_dir)
@@ -783,9 +794,8 @@ def run_main():
sub_list = load_cpac_data_config(
args.data_config_file, args.participant_label, args.aws_input_creds
)
- list(sub_list)
sub_list = sub_list_filter_by_labels(
- sub_list, {"T1w": args.T1w_label, "bold": args.bold_label}
+ list(sub_list), {"T1w": args.T1w_label, "bold": args.bold_label}
)
# C-PAC only handles single anatomical images (for now)
diff --git a/CPAC/_global_fixtures.py b/CPAC/_global_fixtures.py
new file mode 100644
index 0000000000..7b765736ee
--- /dev/null
+++ b/CPAC/_global_fixtures.py
@@ -0,0 +1,34 @@
+# Copyright (C) 2025 C-PAC Developers
+
+# This file is part of C-PAC.
+
+# C-PAC is free software: you can redistribute it and/or modify it under
+# the terms of the GNU Lesser General Public License as published by the
+# Free Software Foundation, either version 3 of the License, or (at your
+# option) any later version.
+
+# C-PAC is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
+# License for more details.
+
+# You should have received a copy of the GNU Lesser General Public
+# License along with C-PAC. If not, see <https://www.gnu.org/licenses/>.
+"""Global fixtures for C-PAC tests."""
+
+from pathlib import Path
+
+from _pytest.tmpdir import TempPathFactory
+from git import Repo
+import pytest
+
+
+@pytest.fixture(scope="session")
+def bids_examples(tmp_path_factory: TempPathFactory) -> Path:
+ """Get the BIDS examples dataset."""
+ example_dir = tmp_path_factory.mktemp("bids-examples")
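+    # mktemp creates the directory, so clone bids-examples only while it is still empty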
+ if not example_dir.exists() or not any(example_dir.iterdir()):
+ Repo.clone_from(
+ "https://github.com/bids-standard/bids-examples.git", str(example_dir)
+ )
+ return example_dir
diff --git a/CPAC/anat_preproc/anat_preproc.py b/CPAC/anat_preproc/anat_preproc.py
index 0f4e770f97..683bb522f7 100644
--- a/CPAC/anat_preproc/anat_preproc.py
+++ b/CPAC/anat_preproc/anat_preproc.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-# Copyright (C) 2012-2023 C-PAC Developers
+# Copyright (C) 2012-2025 C-PAC Developers
# This file is part of C-PAC.
@@ -1233,7 +1233,7 @@ def freesurfer_fsl_brain_connector(wf, cfg, strat_pool, pipe_num, opt):
mem_gb=0,
mem_x=(0.0115, "in_file", "t"),
)
- reorient_fs_brainmask.inputs.orientation = "RPI"
+ reorient_fs_brainmask.inputs.orientation = cfg.pipeline_setup["desired_orientation"]
reorient_fs_brainmask.inputs.outputtype = "NIFTI_GZ"
wf.connect(
@@ -1255,7 +1255,7 @@ def freesurfer_fsl_brain_connector(wf, cfg, strat_pool, pipe_num, opt):
mem_gb=0,
mem_x=(0.0115, "in_file", "t"),
)
- reorient_fs_T1.inputs.orientation = "RPI"
+ reorient_fs_T1.inputs.orientation = cfg.pipeline_setup["desired_orientation"]
reorient_fs_T1.inputs.outputtype = "NIFTI_GZ"
wf.connect(convert_fs_T1_to_nifti, "out_file", reorient_fs_T1, "in_file")
@@ -1460,7 +1460,7 @@ def anatomical_init(wf, cfg, strat_pool, pipe_num, opt=None):
mem_gb=0,
mem_x=(0.0115, "in_file", "t"),
)
- anat_reorient.inputs.orientation = "RPI"
+ anat_reorient.inputs.orientation = cfg.pipeline_setup["desired_orientation"]
anat_reorient.inputs.outputtype = "NIFTI_GZ"
wf.connect(anat_deoblique, "out_file", anat_reorient, "in_file")
@@ -2268,7 +2268,7 @@ def anatomical_init_T2(wf, cfg, strat_pool, pipe_num, opt=None):
mem_gb=0,
mem_x=(0.0115, "in_file", "t"),
)
- T2_reorient.inputs.orientation = "RPI"
+ T2_reorient.inputs.orientation = cfg.pipeline_setup["desired_orientation"]
T2_reorient.inputs.outputtype = "NIFTI_GZ"
wf.connect(T2_deoblique, "out_file", T2_reorient, "in_file")
@@ -2572,7 +2572,7 @@ def brain_mask_acpc_niworkflows_ants_T2(wf, cfg, strat_pool, pipe_num, opt=None)
config=["anatomical_preproc", "brain_extraction"],
option_key="using",
option_val="UNet",
- inputs=["desc-preproc_T2w", "T1w-brain-template", "T1w-template", "unet_model"],
+ inputs=["desc-preproc_T2w", "T1w-brain-template", "T1w-template", "unet-model"],
outputs=["space-T2w_desc-brain_mask"],
)
def brain_mask_unet_T2(wf, cfg, strat_pool, pipe_num, opt=None):
@@ -2586,7 +2586,7 @@ def brain_mask_unet_T2(wf, cfg, strat_pool, pipe_num, opt=None):
config=["anatomical_preproc", "brain_extraction"],
option_key="using",
option_val="UNet",
- inputs=["desc-preproc_T2w", "T1w-brain-template", "T1w-template", "unet_model"],
+ inputs=["desc-preproc_T2w", "T1w-brain-template", "T1w-template", "unet-model"],
outputs=["space-T2w_desc-acpcbrain_mask"],
)
def brain_mask_acpc_unet_T2(wf, cfg, strat_pool, pipe_num, opt=None):
@@ -2764,24 +2764,6 @@ def brain_extraction_temp_T2(wf, cfg, strat_pool, pipe_num, opt=None):
"desc-restore-brain_T1w",
"desc-ABCDpreproc_T1w",
"pipeline-fs_desc-fast_biasfield",
- "pipeline-fs_hemi-L_desc-surface_curv",
- "pipeline-fs_hemi-R_desc-surface_curv",
- "pipeline-fs_hemi-L_desc-surfaceMesh_pial",
- "pipeline-fs_hemi-R_desc-surfaceMesh_pial",
- "pipeline-fs_hemi-L_desc-surfaceMesh_smoothwm",
- "pipeline-fs_hemi-R_desc-surfaceMesh_smoothwm",
- "pipeline-fs_hemi-L_desc-surfaceMesh_sphere",
- "pipeline-fs_hemi-R_desc-surfaceMesh_sphere",
- "pipeline-fs_hemi-L_desc-surfaceMap_sulc",
- "pipeline-fs_hemi-R_desc-surfaceMap_sulc",
- "pipeline-fs_hemi-L_desc-surfaceMap_thickness",
- "pipeline-fs_hemi-R_desc-surfaceMap_thickness",
- "pipeline-fs_hemi-L_desc-surfaceMap_volume",
- "pipeline-fs_hemi-R_desc-surfaceMap_volume",
- "pipeline-fs_hemi-L_desc-surfaceMesh_white",
- "pipeline-fs_hemi-R_desc-surfaceMesh_white",
- "pipeline-fs_wmparc",
- "freesurfer-subject-dir",
],
)
def freesurfer_abcd_preproc(wf, cfg, strat_pool, pipe_num, opt=None):
@@ -2922,6 +2904,18 @@ def freesurfer_abcd_preproc(wf, cfg, strat_pool, pipe_num, opt=None):
"pipeline-fs_brainmask",
"pipeline-fs_wmparc",
"pipeline-fs_T1",
+ *[
+ f"pipeline-fs_hemi-{hemi}_{entity}"
+ for hemi in ["L", "R"]
+ for entity in [
+ "desc-surface_curv",
+ *[
+ f"desc-surfaceMesh_{_}"
+ for _ in ["pial", "smoothwm", "sphere", "white"]
+ ],
+ *[f"desc-surfaceMap_{_}" for _ in ["sulc", "thickness", "volume"]],
+ ]
+ ],
*freesurfer_abcd_preproc.outputs,
# we're grabbing the postproc outputs and appending them to
# the reconall outputs
@@ -3059,12 +3053,11 @@ def fnirt_based_brain_extraction(config=None, wf_name="fnirt_based_brain_extract
preproc.connect(non_linear_reg, "field_file", apply_warp, "field_file")
# Invert warp and transform dilated brain mask back into native space, and use it to mask input image
- # Input and reference spaces are the same, using 2mm reference to save time
- # invwarp --ref="$Reference2mm" -w "$WD"/str2standard.nii.gz -o "$WD"/standard2str.nii.gz
+ # invwarp --ref="$T1w" -w "$WD"/str2standard.nii.gz -o "$WD"/standard2str.nii.gz
inverse_warp = pe.Node(interface=fsl.InvWarp(), name="inverse_warp")
inverse_warp.inputs.output_type = "NIFTI_GZ"
- preproc.connect(inputnode, "template_skull_for_anat_2mm", inverse_warp, "reference")
+ preproc.connect(inputnode, "anat_data", inverse_warp, "reference")
preproc.connect(non_linear_reg, "field_file", inverse_warp, "warp")
@@ -3167,9 +3160,8 @@ def fast_bias_field_correction(config=None, wf_name="fast_bias_field_correction"
@nodeblock(
name="correct_restore_brain_intensity_abcd",
- config=["anatomical_preproc", "brain_extraction"],
- option_key="using",
- option_val="FreeSurfer-ABCD",
+ config=["anatomical_preproc", "restore_t1w_intensity"],
+ switch=["run"],
inputs=[
(
"desc-preproc_T1w",
diff --git a/CPAC/anat_preproc/lesion_preproc.py b/CPAC/anat_preproc/lesion_preproc.py
index 07871ae32d..21628c97f0 100644
--- a/CPAC/anat_preproc/lesion_preproc.py
+++ b/CPAC/anat_preproc/lesion_preproc.py
@@ -58,7 +58,7 @@ def inverse_lesion(lesion_path):
return lesion_out
-def create_lesion_preproc(wf_name="lesion_preproc"):
+def create_lesion_preproc(cfg=None, wf_name="lesion_preproc"):
"""Process lesions masks.
Lesion mask file is deobliqued and reoriented in the same way as the T1 in
@@ -133,7 +133,9 @@ def create_lesion_preproc(wf_name="lesion_preproc"):
mem_x=(0.0115, "in_file", "t"),
)
- lesion_reorient.inputs.orientation = "RPI"
+ lesion_reorient.inputs.orientation = (
+ cfg.pipeline_setup["desired_orientation"] if cfg else "RPI"
+ )
lesion_reorient.inputs.outputtype = "NIFTI_GZ"
preproc.connect(lesion_deoblique, "out_file", lesion_reorient, "in_file")
diff --git a/CPAC/anat_preproc/utils.py b/CPAC/anat_preproc/utils.py
index 39904bbb66..a494ebceda 100644
--- a/CPAC/anat_preproc/utils.py
+++ b/CPAC/anat_preproc/utils.py
@@ -502,6 +502,38 @@ def mri_convert(in_file, reslice_like=None, out_file=None, args=None):
return out_file
+def mri_convert_reorient(in_file, orientation, out_file=None):
+ """
+    Reorient the mgz files using mri_convert.
+
+ Parameters
+ ----------
+    in_file : string
+        Path to the input mgz file.
+    orientation : string
+        Desired orientation of the output file.
+    out_file : string, optional
+        Path for the mgz output file; generated from ``in_file`` if not provided.
+
+ Returns
+ -------
+ out_file : string
+ A path of reoriented mgz output file.
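+
+    Examples
+    --------
+    Hypothetical path, shown for illustration only; requires FreeSurfer's
+    ``mri_convert`` on the ``PATH``.
+
+    >>> mri_convert_reorient("/tmp/sub-01_T1w.mgz", "RAS")  # doctest: +SKIP
+    '/tmp/sub-01_T1w_reoriented.mgz'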
+ """
+ import os
+
+ if out_file is None:
+        out_file = os.path.splitext(in_file)[0] + "_reoriented.mgz"
+
+    cmd = f"mri_convert {in_file} {out_file} --out_orientation {orientation}"
+
+ os.system(cmd)
+
+ return out_file
+
+
def wb_command(in_file):
import os
diff --git a/CPAC/conftest.py b/CPAC/conftest.py
new file mode 100644
index 0000000000..330489ce0d
--- /dev/null
+++ b/CPAC/conftest.py
@@ -0,0 +1,21 @@
+# Copyright (C) 2025 C-PAC Developers
+
+# This file is part of C-PAC.
+
+# C-PAC is free software: you can redistribute it and/or modify it under
+# the terms of the GNU Lesser General Public License as published by the
+# Free Software Foundation, either version 3 of the License, or (at your
+# option) any later version.
+
+# C-PAC is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
+# License for more details.
+
+# You should have received a copy of the GNU Lesser General Public
+# License along with C-PAC. If not, see <https://www.gnu.org/licenses/>.
+"""Global fixtures for C-PAC tests."""
+
+from CPAC._global_fixtures import bids_examples
+
+__all__ = ["bids_examples"]
diff --git a/CPAC/connectome/connectivity_matrix.py b/CPAC/connectome/connectivity_matrix.py
index c0be9f3f27..38c0411e1b 100644
--- a/CPAC/connectome/connectivity_matrix.py
+++ b/CPAC/connectome/connectivity_matrix.py
@@ -171,7 +171,7 @@ def create_connectome_afni(name, method, pipe_num):
imports=["import subprocess"],
function=strip_afni_output_header,
),
- name="netcorrStripHeader{method}_{pipe_num}",
+ name=f"netcorrStripHeader{method}_{pipe_num}",
)
name_output_node = pe.Node(
diff --git a/CPAC/cwas/tests/test_cwas.py b/CPAC/cwas/tests/test_cwas.py
index 974fd83513..72abfc4d5a 100755
--- a/CPAC/cwas/tests/test_cwas.py
+++ b/CPAC/cwas/tests/test_cwas.py
@@ -16,8 +16,6 @@
# License along with C-PAC. If not, see <https://www.gnu.org/licenses/>.
"""Test the CWAS pipeline."""
-from logging import basicConfig, INFO
-
import pytest
import nibabel as nib
@@ -25,7 +23,6 @@
from CPAC.utils.monitoring.custom_logging import getLogger
logger = getLogger("CPAC.cwas.tests")
-basicConfig(format="%(message)s", level=INFO)
@pytest.mark.skip(reason="requires RegressionTester")
diff --git a/CPAC/cwas/tests/test_pipeline_cwas.py b/CPAC/cwas/tests/test_pipeline_cwas.py
index 866318821a..f910419d2c 100644
--- a/CPAC/cwas/tests/test_pipeline_cwas.py
+++ b/CPAC/cwas/tests/test_pipeline_cwas.py
@@ -16,7 +16,6 @@
# License along with C-PAC. If not, see <https://www.gnu.org/licenses/>.
"""Test the CWAS pipeline."""
-from logging import basicConfig, INFO
import os
from urllib.error import URLError
@@ -30,7 +29,6 @@
from CPAC.utils.monitoring.custom_logging import getLogger
logger = getLogger("CPAC.cwas.tests")
-basicConfig(format="%(message)s", level=INFO)
@pytest.mark.parametrize("z_score", [[0], [1], [0, 1], []])
diff --git a/CPAC/func_preproc/func_preproc.py b/CPAC/func_preproc/func_preproc.py
index 7004b4f025..ff626765c4 100644
--- a/CPAC/func_preproc/func_preproc.py
+++ b/CPAC/func_preproc/func_preproc.py
@@ -501,6 +501,203 @@ def get_idx(in_files, stop_idx=None, start_idx=None):
return stopidx, startidx
+def fsl_afni_subworkflow(cfg, pipe_num, opt=None):
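+    """Generate a BOLD brain mask and a bias-corrected (unifized) BOLD reference.
+
+    Combines ANTs-based alignment to the FSL-AFNI reference templates with FSL BET
+    and AFNI 3dUnifize/3dAutomask, following the niworkflows-derived method used by
+    the ``bold_mask_fsl_afni`` nodeblock below.
+    """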
+ wf = pe.Workflow(name=f"fsl_afni_subworkflow_{pipe_num}")
+
+ inputNode = pe.Node(
+ util.IdentityInterface(
+ fields=[
+ "FSL-AFNI-bold-ref",
+ "FSL-AFNI-brain-mask",
+ "FSL-AFNI-brain-probseg",
+ "motion-basefile",
+ ]
+ ),
+ name="inputspec",
+ )
+
+ outputNode = pe.Node(
+ util.IdentityInterface(
+ fields=["space-bold_desc-brain_mask", "desc-unifized_bold"]
+ ),
+ name="outputspec",
+ )
+
+ # Initialize transforms with antsAI
+ init_aff = pe.Node(
+ AI(
+ metric=("Mattes", 32, "Regular", 0.2),
+ transform=("Affine", 0.1),
+ search_factor=(20, 0.12),
+ principal_axes=False,
+ convergence=(10, 1e-6, 10),
+ verbose=True,
+ ),
+ name=f"init_aff_{pipe_num}",
+ n_procs=cfg.pipeline_setup["system_config"]["num_OMP_threads"],
+ )
+
+ init_aff.inputs.search_grid = (40, (0, 40, 40))
+
+ # Set up spatial normalization
+ norm = pe.Node(
+ ants.Registration(
+ winsorize_upper_quantile=0.98,
+ winsorize_lower_quantile=0.05,
+ float=True,
+ metric=["Mattes"],
+ metric_weight=[1],
+ radius_or_number_of_bins=[64],
+ transforms=["Affine"],
+ transform_parameters=[[0.1]],
+ number_of_iterations=[[200]],
+ convergence_window_size=[10],
+ convergence_threshold=[1.0e-9],
+ sampling_strategy=["Random", "Random"],
+ smoothing_sigmas=[[2]],
+ sigma_units=["mm", "mm", "mm"],
+ shrink_factors=[[2]],
+ sampling_percentage=[0.2],
+ use_histogram_matching=[True],
+ ),
+ name=f"norm_{pipe_num}",
+ n_procs=cfg.pipeline_setup["system_config"]["num_OMP_threads"],
+ )
+
+ map_brainmask = pe.Node(
+ ants.ApplyTransforms(
+ interpolation="BSpline",
+ float=True,
+ ),
+ name=f"map_brainmask_{pipe_num}",
+ )
+
+ binarize_mask = pe.Node(
+ interface=fsl.maths.MathsCommand(), name=f"binarize_mask_{pipe_num}"
+ )
+ binarize_mask.inputs.args = "-thr 0.85 -bin"
+
+ # Dilate pre_mask
+ pre_dilate = pe.Node(
+ fsl.DilateImage(
+ operation="max",
+ kernel_shape="sphere",
+ kernel_size=3.0,
+ internal_datatype="char",
+ ),
+ name=f"pre_mask_dilate_{pipe_num}",
+ )
+
+ # Fix precision errors
+ # https://github.com/ANTsX/ANTs/wiki/Inputs-do-not-occupy-the-same-physical-space#fixing-precision-errors
+ print_header = pe.Node(
+ PrintHeader(what_information=4), name=f"print_header_{pipe_num}"
+ )
+ set_direction = pe.Node(SetDirectionByMatrix(), name=f"set_direction_{pipe_num}")
+
+ # Run N4 normally, force num_threads=1 for stability (images are
+ # small, no need for >1)
+ n4_correct = pe.Node(
+ ants.N4BiasFieldCorrection(
+ dimension=3, copy_header=True, bspline_fitting_distance=200
+ ),
+ shrink_factor=2,
+ rescale_intensities=True,
+ name=f"n4_correct_{pipe_num}",
+ n_procs=1,
+ )
+
+ # Create a generous BET mask out of the bias-corrected EPI
+ skullstrip_first_pass = pe.Node(
+ fsl.BET(frac=0.2, mask=True, functional=False),
+ name=f"skullstrip_first_pass_{pipe_num}",
+ )
+
+ bet_dilate = pe.Node(
+ fsl.DilateImage(
+ operation="max",
+ kernel_shape="sphere",
+ kernel_size=6.0,
+ internal_datatype="char",
+ ),
+ name=f"skullstrip_first_dilate_{pipe_num}",
+ )
+
+ bet_mask = pe.Node(fsl.ApplyMask(), name=f"skullstrip_first_mask_{pipe_num}")
+
+    # Use AFNI's unifize for T2 contrast
+ unifize = pe.Node(
+ afni_utils.Unifize(
+ t2=True,
+ outputtype="NIFTI_GZ",
+ args="-clfrac 0.2 -rbt 18.3 65.0 90.0",
+ out_file="uni.nii.gz",
+ ),
+ name=f"unifize_{pipe_num}",
+ )
+
+    # Run AFNI's 3dAutomask to extract a refined brain mask
+ skullstrip_second_pass = pe.Node(
+ preprocess.Automask(dilate=1, outputtype="NIFTI_GZ"),
+ name=f"skullstrip_second_pass_{pipe_num}",
+ )
+
+ # Take intersection of both masks
+ combine_masks = pe.Node(
+ fsl.BinaryMaths(operation="mul"), name=f"combine_masks_{pipe_num}"
+ )
+
+ # Compute masked brain
+ apply_mask = pe.Node(fsl.ApplyMask(), name=f"extract_ref_brain_bold_{pipe_num}")
+
+ wf.connect(
+ [
+ (inputNode, init_aff, [("FSL-AFNI-bold-ref", "fixed_image")]),
+ (inputNode, init_aff, [("FSL-AFNI-brain-mask", "fixed_image_mask")]),
+ (inputNode, init_aff, [("motion-basefile", "moving_image")]),
+ (init_aff, norm, [("output_transform", "initial_moving_transform")]),
+ (inputNode, norm, [("FSL-AFNI-bold-ref", "fixed_image")]),
+ (inputNode, norm, [("motion-basefile", "moving_image")]),
+ # Use the higher resolution and probseg for numerical stability in rounding
+ (inputNode, map_brainmask, [("FSL-AFNI-brain-probseg", "input_image")]),
+ (inputNode, map_brainmask, [("motion-basefile", "reference_image")]),
+ (
+ norm,
+ map_brainmask,
+ [
+ ("reverse_invert_flags", "invert_transform_flags"),
+ ("reverse_transforms", "transforms"),
+ ],
+ ),
+ (map_brainmask, binarize_mask, [("output_image", "in_file")]),
+ (binarize_mask, pre_dilate, [("out_file", "in_file")]),
+ (pre_dilate, print_header, [("out_file", "image")]),
+ (print_header, set_direction, [("header", "direction")]),
+ (
+ inputNode,
+ set_direction,
+ [("motion-basefile", "infile"), ("motion-basefile", "outfile")],
+ ),
+ (set_direction, n4_correct, [("outfile", "mask_image")]),
+ (inputNode, n4_correct, [("motion-basefile", "input_image")]),
+ (n4_correct, skullstrip_first_pass, [("output_image", "in_file")]),
+ (skullstrip_first_pass, bet_dilate, [("mask_file", "in_file")]),
+ (bet_dilate, bet_mask, [("out_file", "mask_file")]),
+ (skullstrip_first_pass, bet_mask, [("out_file", "in_file")]),
+ (bet_mask, unifize, [("out_file", "in_file")]),
+ (unifize, skullstrip_second_pass, [("out_file", "in_file")]),
+ (skullstrip_first_pass, combine_masks, [("mask_file", "in_file")]),
+ (skullstrip_second_pass, combine_masks, [("out_file", "operand_file")]),
+ (unifize, apply_mask, [("out_file", "in_file")]),
+ (combine_masks, apply_mask, [("out_file", "mask_file")]),
+ (combine_masks, outputNode, [("out_file", "space-bold_desc-brain_mask")]),
+ (apply_mask, outputNode, [("out_file", "desc-unifized_bold")]),
+ ]
+ )
+
+ return wf
+
+
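+# A sketch of the intended wiring, mirroring the connections made in the
+# ``bold_mask_fsl_afni`` nodeblock later in this diff (``wf``, ``cfg``,
+# ``strat_pool``, and ``pipe_num`` come from the calling nodeblock):
+#
+#     fsl_afni_wf = fsl_afni_subworkflow(cfg, pipe_num)
+#     for key in ("FSL-AFNI-bold-ref", "FSL-AFNI-brain-mask",
+#                 "FSL-AFNI-brain-probseg", "motion-basefile"):
+#         node, out = strat_pool.get_data(key)
+#         wf.connect(node, out, fsl_afni_wf, f"inputspec.{key}")
+#     # outputs: "outputspec.desc-unifized_bold",
+#     #          "outputspec.space-bold_desc-brain_mask"
+
+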
@nodeblock(
name="func_reorient",
config=["functional_preproc", "update_header"],
@@ -528,7 +725,7 @@ def func_reorient(wf, cfg, strat_pool, pipe_num, opt=None):
mem_x=(0.0115, "in_file", "t"),
)
- func_reorient.inputs.orientation = "RPI"
+ func_reorient.inputs.orientation = cfg.pipeline_setup["desired_orientation"]
func_reorient.inputs.outputtype = "NIFTI_GZ"
wf.connect(func_deoblique, "out_file", func_reorient, "in_file")
@@ -953,7 +1150,7 @@ def form_thr_string(thr):
"space-bold_desc-brain_mask": {
"Description": "mask of the skull-stripped input file"
},
- "desc-ref_bold": {
+ "desc-unifized_bold": {
"Description": "the ``bias_corrected_file`` after skull-stripping"
},
},
@@ -1004,6 +1201,7 @@ def bold_mask_fsl_afni(wf, cfg, strat_pool, pipe_num, opt=None):
# * Removed ``if not pre_mask`` conditional block
# * Modified docstring to reflect local changes
# * Refactored some variables and connections and updated style to match C-PAC codebase
+    # * Moved the FSL-AFNI subworkflow into a separate function and call it from this nodeblock.
# ORIGINAL WORK'S ATTRIBUTION NOTICE:
# Copyright (c) 2016, the CRN developers team.
@@ -1048,184 +1246,23 @@ def bold_mask_fsl_afni(wf, cfg, strat_pool, pipe_num, opt=None):
# Modifications copyright (C) 2021 - 2024 C-PAC Developers
- # Initialize transforms with antsAI
- init_aff = pe.Node(
- AI(
- metric=("Mattes", 32, "Regular", 0.2),
- transform=("Affine", 0.1),
- search_factor=(20, 0.12),
- principal_axes=False,
- convergence=(10, 1e-6, 10),
- verbose=True,
- ),
- name=f"init_aff_{pipe_num}",
- n_procs=cfg.pipeline_setup["system_config"]["num_OMP_threads"],
- )
- node, out = strat_pool.get_data("FSL-AFNI-bold-ref")
- wf.connect(node, out, init_aff, "fixed_image")
-
- node, out = strat_pool.get_data("FSL-AFNI-brain-mask")
- wf.connect(node, out, init_aff, "fixed_image_mask")
-
- init_aff.inputs.search_grid = (40, (0, 40, 40))
-
- # Set up spatial normalization
- norm = pe.Node(
- ants.Registration(
- winsorize_upper_quantile=0.98,
- winsorize_lower_quantile=0.05,
- float=True,
- metric=["Mattes"],
- metric_weight=[1],
- radius_or_number_of_bins=[64],
- transforms=["Affine"],
- transform_parameters=[[0.1]],
- number_of_iterations=[[200]],
- convergence_window_size=[10],
- convergence_threshold=[1.0e-9],
- sampling_strategy=["Random", "Random"],
- smoothing_sigmas=[[2]],
- sigma_units=["mm", "mm", "mm"],
- shrink_factors=[[2]],
- sampling_percentage=[0.2],
- use_histogram_matching=[True],
- ),
- name=f"norm_{pipe_num}",
- n_procs=cfg.pipeline_setup["system_config"]["num_OMP_threads"],
- )
-
- node, out = strat_pool.get_data("FSL-AFNI-bold-ref")
- wf.connect(node, out, norm, "fixed_image")
-
- map_brainmask = pe.Node(
- ants.ApplyTransforms(
- interpolation="BSpline",
- float=True,
- ),
- name=f"map_brainmask_{pipe_num}",
- )
-
- # Use the higher resolution and probseg for numerical stability in rounding
- node, out = strat_pool.get_data("FSL-AFNI-brain-probseg")
- wf.connect(node, out, map_brainmask, "input_image")
-
- binarize_mask = pe.Node(
- interface=fsl.maths.MathsCommand(), name=f"binarize_mask_{pipe_num}"
- )
- binarize_mask.inputs.args = "-thr 0.85 -bin"
-
- # Dilate pre_mask
- pre_dilate = pe.Node(
- fsl.DilateImage(
- operation="max",
- kernel_shape="sphere",
- kernel_size=3.0,
- internal_datatype="char",
- ),
- name=f"pre_mask_dilate_{pipe_num}",
- )
-
- # Fix precision errors
- # https://github.com/ANTsX/ANTs/wiki/Inputs-do-not-occupy-the-same-physical-space#fixing-precision-errors
- print_header = pe.Node(
- PrintHeader(what_information=4), name=f"print_header_{pipe_num}"
- )
- set_direction = pe.Node(SetDirectionByMatrix(), name=f"set_direction_{pipe_num}")
+ fsl_afni_wf = fsl_afni_subworkflow(cfg, pipe_num, opt)
- # Run N4 normally, force num_threads=1 for stability (images are
- # small, no need for >1)
- n4_correct = pe.Node(
- ants.N4BiasFieldCorrection(
- dimension=3, copy_header=True, bspline_fitting_distance=200
- ),
- shrink_factor=2,
- rescale_intensities=True,
- name=f"n4_correct_{pipe_num}",
- n_procs=1,
- )
-
- # Create a generous BET mask out of the bias-corrected EPI
- skullstrip_first_pass = pe.Node(
- fsl.BET(frac=0.2, mask=True, functional=False),
- name=f"skullstrip_first_pass_{pipe_num}",
- )
-
- bet_dilate = pe.Node(
- fsl.DilateImage(
- operation="max",
- kernel_shape="sphere",
- kernel_size=6.0,
- internal_datatype="char",
- ),
- name=f"skullstrip_first_dilate_{pipe_num}",
- )
-
- bet_mask = pe.Node(fsl.ApplyMask(), name=f"skullstrip_first_mask_{pipe_num}")
-
- # Use AFNI's unifize for T2 constrast
- unifize = pe.Node(
- afni_utils.Unifize(
- t2=True,
- outputtype="NIFTI_GZ",
- args="-clfrac 0.2 -rbt 18.3 65.0 90.0",
- out_file="uni.nii.gz",
- ),
- name=f"unifize_{pipe_num}",
- )
-
- # Run ANFI's 3dAutomask to extract a refined brain mask
- skullstrip_second_pass = pe.Node(
- preprocess.Automask(dilate=1, outputtype="NIFTI_GZ"),
- name=f"skullstrip_second_pass_{pipe_num}",
- )
-
- # Take intersection of both masks
- combine_masks = pe.Node(
- fsl.BinaryMaths(operation="mul"), name=f"combine_masks_{pipe_num}"
- )
-
- # Compute masked brain
- apply_mask = pe.Node(fsl.ApplyMask(), name=f"extract_ref_brain_bold_{pipe_num}")
-
- node, out = strat_pool.get_data(["motion-basefile"])
-
- wf.connect(
- [
- (node, init_aff, [(out, "moving_image")]),
- (node, map_brainmask, [(out, "reference_image")]),
- (node, norm, [(out, "moving_image")]),
- (init_aff, norm, [("output_transform", "initial_moving_transform")]),
- (
- norm,
- map_brainmask,
- [
- ("reverse_invert_flags", "invert_transform_flags"),
- ("reverse_transforms", "transforms"),
- ],
- ),
- (map_brainmask, binarize_mask, [("output_image", "in_file")]),
- (binarize_mask, pre_dilate, [("out_file", "in_file")]),
- (pre_dilate, print_header, [("out_file", "image")]),
- (print_header, set_direction, [("header", "direction")]),
- (node, set_direction, [(out, "infile"), (out, "outfile")]),
- (set_direction, n4_correct, [("outfile", "mask_image")]),
- (node, n4_correct, [(out, "input_image")]),
- (n4_correct, skullstrip_first_pass, [("output_image", "in_file")]),
- (skullstrip_first_pass, bet_dilate, [("mask_file", "in_file")]),
- (bet_dilate, bet_mask, [("out_file", "mask_file")]),
- (skullstrip_first_pass, bet_mask, [("out_file", "in_file")]),
- (bet_mask, unifize, [("out_file", "in_file")]),
- (unifize, skullstrip_second_pass, [("out_file", "in_file")]),
- (skullstrip_first_pass, combine_masks, [("mask_file", "in_file")]),
- (skullstrip_second_pass, combine_masks, [("out_file", "operand_file")]),
- (unifize, apply_mask, [("out_file", "in_file")]),
- (combine_masks, apply_mask, [("out_file", "mask_file")]),
- ]
- )
+ for key in [
+ "FSL-AFNI-bold-ref",
+ "FSL-AFNI-brain-mask",
+ "FSL-AFNI-brain-probseg",
+ "motion-basefile",
+ ]:
+ node, out = strat_pool.get_data(key)
+ wf.connect(node, out, fsl_afni_wf, f"inputspec.{key}")
outputs = {
- "space-bold_desc-brain_mask": (combine_masks, "out_file"),
- "desc-ref_bold": (apply_mask, "out_file"),
+ "desc-unifized_bold": (fsl_afni_wf, "outputspec.desc-unifized_bold"),
+ "space-bold_desc-brain_mask": (
+ fsl_afni_wf,
+ "outputspec.space-bold_desc-brain_mask",
+ ),
}
return (wf, outputs)
@@ -1290,7 +1327,7 @@ def bold_mask_anatomical_refined(wf, cfg, strat_pool, pipe_num, opt=None):
mem_x=(0.0115, "in_file", "t"),
)
- func_reorient.inputs.orientation = "RPI"
+ func_reorient.inputs.orientation = cfg.pipeline_setup["desired_orientation"]
func_reorient.inputs.outputtype = "NIFTI_GZ"
wf.connect(func_deoblique, "out_file", func_reorient, "in_file")
@@ -1466,6 +1503,91 @@ def bold_mask_anatomical_based(wf, cfg, strat_pool, pipe_num, opt=None):
return (wf, outputs)
+def anat_brain_to_bold_res(wf_name, cfg, pipe_num):
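+    """Resample the template-space anatomical brain to BOLD (functional) resolution."""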
+ wf = pe.Workflow(name=f"{wf_name}_{pipe_num}")
+
+ inputNode = pe.Node(
+ util.IdentityInterface(
+ fields=["T1w-template-funcreg", "space-template_desc-preproc_T1w"]
+ ),
+ name="inputspec",
+ )
+ outputNode = pe.Node(
+ util.IdentityInterface(fields=["space-template_res-bold_desc-brain_T1w"]),
+ name="outputspec",
+ )
+
+ # applywarp --rel --interp=spline -i ${T1wImage} -r ${ResampRefIm} --premat=$FSLDIR/etc/flirtsch/ident.mat -o ${WD}/${T1wImageFile}.${FinalfMRIResolution}
+ anat_brain_to_func_res = pe.Node(
+ interface=fsl.ApplyWarp(), name=f"resample_anat_brain_in_standard_{pipe_num}"
+ )
+
+ anat_brain_to_func_res.inputs.interp = "spline"
+ anat_brain_to_func_res.inputs.premat = cfg.registration_workflows[
+ "anatomical_registration"
+ ]["registration"]["FSL-FNIRT"]["identity_matrix"]
+
+ wf.connect(
+ inputNode, "space-template_desc-preproc_T1w", anat_brain_to_func_res, "in_file"
+ )
+ wf.connect(inputNode, "T1w-template-funcreg", anat_brain_to_func_res, "ref_file")
+
+ wf.connect(
+ anat_brain_to_func_res,
+ "out_file",
+ outputNode,
+ "space-template_res-bold_desc-brain_T1w",
+ )
+ return wf
+
+
+def anat_brain_mask_to_bold_res(wf_name, cfg, pipe_num):
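+    """Resample the template-space anatomical brain mask to BOLD (functional) resolution."""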
+ # Create brain masks in this space from the FreeSurfer output (changing resolution)
+ # applywarp --rel --interp=nn -i ${FreeSurferBrainMask}.nii.gz -r ${WD}/${T1wImageFile}.${FinalfMRIResolution} --premat=$FSLDIR/etc/flirtsch/ident.mat -o ${WD}/${FreeSurferBrainMaskFile}.${FinalfMRIResolution}.nii.gz
+ wf = pe.Workflow(name=f"{wf_name}_{pipe_num}")
+ inputNode = pe.Node(
+ util.IdentityInterface(
+ fields=["space-template_desc-brain_mask", "space-template_desc-preproc_T1w"]
+ ),
+ name="inputspec",
+ )
+ outputNode = pe.Node(
+ util.IdentityInterface(fields=["space-template_desc-bold_mask"]),
+ name="outputspec",
+ )
+
+ anat_brain_mask_to_func_res = pe.Node(
+ interface=fsl.ApplyWarp(),
+ name=f"resample_anat_brain_mask_in_standard_{pipe_num}",
+ )
+
+ anat_brain_mask_to_func_res.inputs.interp = "nn"
+ anat_brain_mask_to_func_res.inputs.premat = cfg.registration_workflows[
+ "anatomical_registration"
+ ]["registration"]["FSL-FNIRT"]["identity_matrix"]
+
+ wf.connect(
+ inputNode,
+ "space-template_desc-brain_mask",
+ anat_brain_mask_to_func_res,
+ "in_file",
+ )
+ wf.connect(
+ inputNode,
+ "space-template_desc-preproc_T1w",
+ anat_brain_mask_to_func_res,
+ "ref_file",
+ )
+ wf.connect(
+ anat_brain_mask_to_func_res,
+ "out_file",
+ outputNode,
+ "space-template_desc-bold_mask",
+ )
+
+ return wf
+
+
@nodeblock(
name="bold_mask_anatomical_resampled",
switch=[
@@ -1478,7 +1600,7 @@ def bold_mask_anatomical_based(wf, cfg, strat_pool, pipe_num, opt=None):
"desc-preproc_bold",
"T1w-template-funcreg",
"space-template_desc-preproc_T1w",
- "space-template_desc-T1w_mask",
+ "space-template_desc-brain_mask",
],
outputs=[
"space-template_res-bold_desc-brain_T1w",
@@ -1491,39 +1613,35 @@ def bold_mask_anatomical_resampled(wf, cfg, strat_pool, pipe_num, opt=None):
Adapted from `DCAN Lab's BOLD mask method from the ABCD pipeline `_.
"""
- # applywarp --rel --interp=spline -i ${T1wImage} -r ${ResampRefIm} --premat=$FSLDIR/etc/flirtsch/ident.mat -o ${WD}/${T1wImageFile}.${FinalfMRIResolution}
- anat_brain_to_func_res = pe.Node(
- interface=fsl.ApplyWarp(), name=f"resample_anat_brain_in_standard_{pipe_num}"
- )
-
- anat_brain_to_func_res.inputs.interp = "spline"
- anat_brain_to_func_res.inputs.premat = cfg.registration_workflows[
- "anatomical_registration"
- ]["registration"]["FSL-FNIRT"]["identity_matrix"]
+ anat_brain_to_func_res = anat_brain_to_bold_res(wf, cfg, pipe_num)
node, out = strat_pool.get_data("space-template_desc-preproc_T1w")
- wf.connect(node, out, anat_brain_to_func_res, "in_file")
+ wf.connect(
+ node, out, anat_brain_to_func_res, "inputspec.space-template_desc-preproc_T1w"
+ )
node, out = strat_pool.get_data("T1w-template-funcreg")
- wf.connect(node, out, anat_brain_to_func_res, "ref_file")
+ wf.connect(node, out, anat_brain_to_func_res, "inputspec.T1w-template-funcreg")
# Create brain masks in this space from the FreeSurfer output (changing resolution)
# applywarp --rel --interp=nn -i ${FreeSurferBrainMask}.nii.gz -r ${WD}/${T1wImageFile}.${FinalfMRIResolution} --premat=$FSLDIR/etc/flirtsch/ident.mat -o ${WD}/${FreeSurferBrainMaskFile}.${FinalfMRIResolution}.nii.gz
- anat_brain_mask_to_func_res = pe.Node(
- interface=fsl.ApplyWarp(),
- name=f"resample_anat_brain_mask_in_standard_{pipe_num}",
+ anat_brain_mask_to_func_res = anat_brain_mask_to_bold_res(
+ wf_name="anat_brain_mask_to_bold_res", cfg=cfg, pipe_num=pipe_num
)
- anat_brain_mask_to_func_res.inputs.interp = "nn"
- anat_brain_mask_to_func_res.inputs.premat = cfg.registration_workflows[
- "anatomical_registration"
- ]["registration"]["FSL-FNIRT"]["identity_matrix"]
-
- node, out = strat_pool.get_data("space-template_desc-T1w_mask")
- wf.connect(node, out, anat_brain_mask_to_func_res, "in_file")
+ node, out = strat_pool.get_data("space-template_desc-brain_mask")
+ wf.connect(
+ node,
+ out,
+ anat_brain_mask_to_func_res,
+ "inputspec.space-template_desc-brain_mask",
+ )
wf.connect(
- anat_brain_to_func_res, "out_file", anat_brain_mask_to_func_res, "ref_file"
+ anat_brain_to_func_res,
+ "outputspec.space-template_res-bold_desc-brain_T1w",
+ anat_brain_mask_to_func_res,
+ "inputspec.space-template_desc-preproc_T1w",
)
# Resample func mask in template space back to native space
@@ -1537,15 +1655,24 @@ def bold_mask_anatomical_resampled(wf, cfg, strat_pool, pipe_num, opt=None):
func_mask_template_to_native.inputs.outputtype = "NIFTI_GZ"
wf.connect(
- anat_brain_mask_to_func_res, "out_file", func_mask_template_to_native, "in_file"
+ anat_brain_mask_to_func_res,
+ "outputspec.space-template_desc-bold_mask",
+ func_mask_template_to_native,
+ "in_file",
)
node, out = strat_pool.get_data("desc-preproc_bold")
wf.connect(node, out, func_mask_template_to_native, "master")
outputs = {
- "space-template_res-bold_desc-brain_T1w": (anat_brain_to_func_res, "out_file"),
- "space-template_desc-bold_mask": (anat_brain_mask_to_func_res, "out_file"),
+ "space-template_res-bold_desc-brain_T1w": (
+ anat_brain_to_func_res,
+ "outputspec.space-template_res-bold_desc-brain_T1w",
+ ),
+ "space-template_desc-bold_mask": (
+ anat_brain_mask_to_func_res,
+ "outputspec.space-template_desc-bold_mask",
+ ),
"space-bold_desc-brain_mask": (func_mask_template_to_native, "out_file"),
}
@@ -1703,6 +1830,10 @@ def bold_mask_ccs(wf, cfg, strat_pool, pipe_num, opt=None):
"Description": "The skull-stripped BOLD time-series.",
"SkullStripped": True,
},
+ "desc-head_bold": {
+            "Description": "The non-skull-stripped BOLD time-series.",
+ "SkullStripped": False,
+ },
},
)
def bold_masking(wf, cfg, strat_pool, pipe_num, opt=None):
@@ -1714,8 +1845,8 @@ def bold_masking(wf, cfg, strat_pool, pipe_num, opt=None):
func_edge_detect.inputs.expr = "a*b"
func_edge_detect.inputs.outputtype = "NIFTI_GZ"
- node, out = strat_pool.get_data("desc-preproc_bold")
- wf.connect(node, out, func_edge_detect, "in_file_a")
+ node_head_bold, out_head_bold = strat_pool.get_data("desc-preproc_bold")
+ wf.connect(node_head_bold, out_head_bold, func_edge_detect, "in_file_a")
node, out = strat_pool.get_data("space-bold_desc-brain_mask")
wf.connect(node, out, func_edge_detect, "in_file_b")
@@ -1723,6 +1854,7 @@ def bold_masking(wf, cfg, strat_pool, pipe_num, opt=None):
outputs = {
"desc-preproc_bold": (func_edge_detect, "out_file"),
"desc-brain_bold": (func_edge_detect, "out_file"),
+ "desc-head_bold": (node_head_bold, out_head_bold),
}
return (wf, outputs)
diff --git a/CPAC/image_utils/tests/test_smooth.py b/CPAC/image_utils/tests/test_smooth.py
index d1f8a8ec98..bf1c79fd94 100644
--- a/CPAC/image_utils/tests/test_smooth.py
+++ b/CPAC/image_utils/tests/test_smooth.py
@@ -14,7 +14,6 @@
# You should have received a copy of the GNU Lesser General Public
# License along with C-PAC. If not, see <https://www.gnu.org/licenses/>.
-from logging import basicConfig, INFO
import os
import pytest
@@ -26,7 +25,6 @@
from CPAC.utils.test_mocks import configuration_strategy_mock
logger = getLogger("CPAC.image_utils.tests")
-basicConfig(format="%(message)s", level=INFO)
@pytest.mark.skip(reason="needs refactoring")
diff --git a/CPAC/longitudinal_pipeline/longitudinal_workflow.py b/CPAC/longitudinal_pipeline/longitudinal_workflow.py
index 4229fc30c6..5c989675c1 100644
--- a/CPAC/longitudinal_pipeline/longitudinal_workflow.py
+++ b/CPAC/longitudinal_pipeline/longitudinal_workflow.py
@@ -1204,6 +1204,7 @@ def func_longitudinal_template_wf(subject_id, strat_list, config):
resampled_template.inputs.template = template
resampled_template.inputs.template_name = template_name
resampled_template.inputs.tag = tag
+ resampled_template.inputs.orientation = config["desired_orientation"]
strat_init.update_resource_pool(
{template_name: (resampled_template, "resampled_template")}
diff --git a/CPAC/nuisance/bandpass.py b/CPAC/nuisance/bandpass.py
index c5dc0f170d..451d4a5b9e 100644
--- a/CPAC/nuisance/bandpass.py
+++ b/CPAC/nuisance/bandpass.py
@@ -1,6 +1,8 @@
import os
+from pathlib import Path
import numpy as np
+from numpy.typing import NDArray
import nibabel as nib
from scipy.fftpack import fft, ifft
@@ -44,6 +46,22 @@ def ideal_bandpass(data, sample_period, bandpass_freqs):
return np.real_if_close(ifft(f_data)[:sample_length])
+def read_1D(one_D: Path | str) -> tuple[list[str], NDArray]:
+    """Parse a header from a 1D file, returning that header and a NumPy array."""
+ header = []
+ with open(one_D, "r") as _f:
+ # Each leading line that doesn't start with a number goes into the header
+ for line in _f.readlines():
+ try:
+ float(line.split()[0])
+ break
+ except ValueError:
+ header.append(line)
+
+ regressor = np.loadtxt(one_D, skiprows=len(header))
+ return header, regressor
+
+
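+# Illustrative behaviour of ``read_1D`` (hypothetical call; see
+# CPAC/nuisance/tests/test_bandpass.py in this diff for a concrete example):
+#
+#     header, regressor = read_1D("regressors.1D")
+#     # header    -> the leading non-numeric lines, e.g. ["# Nuisance regressors:\n", ...]
+#     # regressor -> 2-D NumPy array, one row per timepoint, one column per regressor
+
+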
def bandpass_voxels(realigned_file, regressor_file, bandpass_freqs, sample_period=None):
"""Performs ideal bandpass filtering on each voxel time-series.
@@ -106,18 +124,9 @@ def bandpass_voxels(realigned_file, regressor_file, bandpass_freqs, sample_perio
img.to_filename(regressor_bandpassed_file)
else:
- with open(regressor_file, "r") as f:
- header = []
-
- # header wouldn't be longer than 5, right? I don't want to
- # loop over the whole file
- for i in range(5):
- line = f.readline()
- if line.startswith("#") or isinstance(line[0], str):
- header.append(line)
-
- # usecols=[list]
- regressor = np.loadtxt(regressor_file, skiprows=len(header))
+ header: list[str]
+ regressor: NDArray
+ header, regressor = read_1D(regressor_file)
Yc = regressor - np.tile(regressor.mean(0), (regressor.shape[0], 1))
Y_bp = np.zeros_like(Yc)
diff --git a/CPAC/nuisance/nuisance.py b/CPAC/nuisance/nuisance.py
index 45337a0c23..ce4c1298da 100644
--- a/CPAC/nuisance/nuisance.py
+++ b/CPAC/nuisance/nuisance.py
@@ -75,8 +75,8 @@ def choose_nuisance_blocks(cfg, rpool, generate_only=False):
]
apply_transform_using = to_template_cfg["apply_transform"]["using"]
input_interface = {
- "default": ("desc-preproc_bold", ["desc-preproc_bold", "bold"]),
- "abcd": ("desc-preproc_bold", "bold"),
+ "default": ("desc-preproc_bold", ["desc-preproc_bold", "desc-reorient_bold"]),
+ "abcd": ("desc-preproc_bold", "desc-reorient_bold"),
"single_step_resampling_from_stc": ("desc-preproc_bold", "desc-stc_bold"),
}.get(apply_transform_using)
if input_interface is not None:
@@ -496,6 +496,104 @@ def gather_nuisance(
return output_file_path, censor_indices
+def offending_timepoints_connector(
+ nuisance_selectors, name="offending_timepoints_connector"
+):
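+    """Find offending timepoints to censor.
+
+    Build a workflow that applies the ``Censor`` selector's FD_J, FD_P, and/or
+    DVARS thresholds to identify timepoints to censor.
+    """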
+ inputspec = pe.Node(
+ util.IdentityInterface(
+ fields=[
+ "fd_j_file_path",
+ "fd_p_file_path",
+ "dvars_file_path",
+ ]
+ ),
+ name="inputspec",
+ )
+
+ wf = pe.Workflow(name=name)
+
+ outputspec = pe.Node(
+ util.IdentityInterface(fields=["out_file"]),
+ name="outputspec",
+ )
+
+ censor_selector = nuisance_selectors.get("Censor")
+
+ find_censors = pe.Node(
+ Function(
+ input_names=[
+ "fd_j_file_path",
+ "fd_j_threshold",
+ "fd_p_file_path",
+ "fd_p_threshold",
+ "dvars_file_path",
+ "dvars_threshold",
+ "number_of_previous_trs_to_censor",
+ "number_of_subsequent_trs_to_censor",
+ ],
+ output_names=["out_file"],
+ function=find_offending_time_points,
+ as_module=True,
+ ),
+ name="find_offending_time_points",
+ )
+
+ if not censor_selector.get("thresholds"):
+ msg = "Censoring requested, but thresh_metric not provided."
+ raise ValueError(msg)
+
+ for threshold in censor_selector["thresholds"]:
+ if "type" not in threshold or threshold["type"] not in [
+ "DVARS",
+ "FD_J",
+ "FD_P",
+ ]:
+ msg = "Censoring requested, but with invalid threshold type."
+ raise ValueError(msg)
+
+ if "value" not in threshold:
+ msg = "Censoring requested, but threshold not provided."
+ raise ValueError(msg)
+
+ if threshold["type"] == "FD_J":
+ find_censors.inputs.fd_j_threshold = threshold["value"]
+ wf.connect(inputspec, "fd_j_file_path", find_censors, "fd_j_file_path")
+
+ if threshold["type"] == "FD_P":
+ find_censors.inputs.fd_p_threshold = threshold["value"]
+ wf.connect(inputspec, "fd_p_file_path", find_censors, "fd_p_file_path")
+
+ if threshold["type"] == "DVARS":
+ find_censors.inputs.dvars_threshold = threshold["value"]
+ wf.connect(inputspec, "dvars_file_path", find_censors, "dvars_file_path")
+
+ if (
+ censor_selector.get("number_of_previous_trs_to_censor")
+ and censor_selector["method"] != "SpikeRegression"
+ ):
+ find_censors.inputs.number_of_previous_trs_to_censor = censor_selector[
+ "number_of_previous_trs_to_censor"
+ ]
+
+ else:
+ find_censors.inputs.number_of_previous_trs_to_censor = 0
+
+ if (
+ censor_selector.get("number_of_subsequent_trs_to_censor")
+ and censor_selector["method"] != "SpikeRegression"
+ ):
+ find_censors.inputs.number_of_subsequent_trs_to_censor = censor_selector[
+ "number_of_subsequent_trs_to_censor"
+ ]
+
+ else:
+ find_censors.inputs.number_of_subsequent_trs_to_censor = 0
+
+ wf.connect(find_censors, "out_file", outputspec, "out_file")
+
+ return wf
+
+
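+# For illustration, a hypothetical ``Censor`` selector of the shape this connector
+# expects (``method`` is one of Kill, Zero, Interpolate, or SpikeRegression; each
+# threshold needs a ``type`` of FD_J, FD_P, or DVARS and a ``value``):
+#
+#     nuisance_selectors = {
+#         "Censor": {
+#             "method": "Kill",
+#             "thresholds": [{"type": "FD_J", "value": 0.2}],
+#             "number_of_previous_trs_to_censor": 1,
+#             "number_of_subsequent_trs_to_censor": 1,
+#         }
+#     }
+#     censor_wf = offending_timepoints_connector(nuisance_selectors)
+
+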
def create_regressor_workflow(
nuisance_selectors,
use_ants,
@@ -1547,6 +1645,38 @@ def create_regressor_workflow(
"functional_file_path",
)
+ if nuisance_selectors.get("Censor"):
+ if nuisance_selectors["Censor"]["method"] == "SpikeRegression":
+ offending_timepoints_connector_wf = offending_timepoints_connector(
+ nuisance_selectors
+ )
+ nuisance_wf.connect(
+ [
+ (
+ inputspec,
+ offending_timepoints_connector_wf,
+ [("fd_j_file_path", "inputspec.fd_j_file_path")],
+ ),
+ (
+ inputspec,
+ offending_timepoints_connector_wf,
+ [("fd_p_file_path", "inputspec.fd_p_file_path")],
+ ),
+ (
+ inputspec,
+ offending_timepoints_connector_wf,
+ [("dvars_file_path", "inputspec.dvars_file_path")],
+ ),
+ ]
+ )
+
+ nuisance_wf.connect(
+ offending_timepoints_connector_wf,
+ "outputspec.out_file",
+ build_nuisance_regressors,
+ "censor_file_path",
+ )
+
build_nuisance_regressors.inputs.selector = nuisance_selectors
# Check for any regressors to combine into files
@@ -1656,93 +1786,28 @@ def create_nuisance_regression_workflow(nuisance_selectors, name="nuisance_regre
nuisance_wf = pe.Workflow(name=name)
if nuisance_selectors.get("Censor"):
- censor_methods = ["Kill", "Zero", "Interpolate", "SpikeRegression"]
-
- censor_selector = nuisance_selectors.get("Censor")
- if censor_selector.get("method") not in censor_methods:
- msg = (
- "Improper censoring method specified ({0}), "
- "should be one of {1}.".format(
- censor_selector.get("method"), censor_methods
- )
- )
- raise ValueError(msg)
-
- find_censors = pe.Node(
- Function(
- input_names=[
- "fd_j_file_path",
- "fd_j_threshold",
- "fd_p_file_path",
- "fd_p_threshold",
- "dvars_file_path",
- "dvars_threshold",
- "number_of_previous_trs_to_censor",
- "number_of_subsequent_trs_to_censor",
- ],
- output_names=["out_file"],
- function=find_offending_time_points,
- as_module=True,
- ),
- name="find_offending_time_points",
+ offending_timepoints_connector_wf = offending_timepoints_connector(
+ nuisance_selectors
)
-
- if not censor_selector.get("thresholds"):
- msg = "Censoring requested, but thresh_metric not provided."
- raise ValueError(msg)
-
- for threshold in censor_selector["thresholds"]:
- if "type" not in threshold or threshold["type"] not in [
- "DVARS",
- "FD_J",
- "FD_P",
- ]:
- msg = "Censoring requested, but with invalid threshold type."
- raise ValueError(msg)
-
- if "value" not in threshold:
- msg = "Censoring requested, but threshold not provided."
- raise ValueError(msg)
-
- if threshold["type"] == "FD_J":
- find_censors.inputs.fd_j_threshold = threshold["value"]
- nuisance_wf.connect(
- inputspec, "fd_j_file_path", find_censors, "fd_j_file_path"
- )
-
- if threshold["type"] == "FD_P":
- find_censors.inputs.fd_p_threshold = threshold["value"]
- nuisance_wf.connect(
- inputspec, "fd_p_file_path", find_censors, "fd_p_file_path"
- )
-
- if threshold["type"] == "DVARS":
- find_censors.inputs.dvars_threshold = threshold["value"]
- nuisance_wf.connect(
- inputspec, "dvars_file_path", find_censors, "dvars_file_path"
- )
-
- if (
- censor_selector.get("number_of_previous_trs_to_censor")
- and censor_selector["method"] != "SpikeRegression"
- ):
- find_censors.inputs.number_of_previous_trs_to_censor = censor_selector[
- "number_of_previous_trs_to_censor"
- ]
-
- else:
- find_censors.inputs.number_of_previous_trs_to_censor = 0
-
- if (
- censor_selector.get("number_of_subsequent_trs_to_censor")
- and censor_selector["method"] != "SpikeRegression"
- ):
- find_censors.inputs.number_of_subsequent_trs_to_censor = censor_selector[
- "number_of_subsequent_trs_to_censor"
+ nuisance_wf.connect(
+ [
+ (
+ inputspec,
+ offending_timepoints_connector_wf,
+ [("fd_j_file_path", "inputspec.fd_j_file_path")],
+ ),
+ (
+ inputspec,
+ offending_timepoints_connector_wf,
+ [("fd_p_file_path", "inputspec.fd_p_file_path")],
+ ),
+ (
+ inputspec,
+ offending_timepoints_connector_wf,
+ [("dvars_file_path", "inputspec.dvars_file_path")],
+ ),
]
-
- else:
- find_censors.inputs.number_of_subsequent_trs_to_censor = 0
+ )
# Use 3dTproject to perform nuisance variable regression
nuisance_regression = pe.Node(
@@ -1757,17 +1822,19 @@ def create_nuisance_regression_workflow(nuisance_selectors, name="nuisance_regre
nuisance_regression.inputs.norm = False
if nuisance_selectors.get("Censor"):
- if nuisance_selectors["Censor"]["method"] == "SpikeRegression":
- nuisance_wf.connect(find_censors, "out_file", nuisance_regression, "censor")
- else:
- if nuisance_selectors["Censor"]["method"] == "Interpolate":
- nuisance_regression.inputs.cenmode = "NTRP"
- else:
- nuisance_regression.inputs.cenmode = nuisance_selectors["Censor"][
- "method"
- ].upper()
+ if nuisance_selectors["Censor"]["method"] != "SpikeRegression":
+ nuisance_regression.inputs.cenmode = (
+ "NTRP"
+ if nuisance_selectors["Censor"]["method"] == "Interpolate"
+ else nuisance_selectors["Censor"]["method"].upper()
+ )
- nuisance_wf.connect(find_censors, "out_file", nuisance_regression, "censor")
+ nuisance_wf.connect(
+ offending_timepoints_connector_wf,
+ "outputspec.out_file",
+ nuisance_regression,
+ "censor",
+ )
if nuisance_selectors.get("PolyOrt"):
if not nuisance_selectors["PolyOrt"].get("degree"):
@@ -2457,6 +2524,8 @@ def nuisance_regressors_generation(
reg_tool = check_prov_for_regtool(xfm_prov)
if reg_tool is not None:
use_ants = reg_tool == "ants"
+ else:
+ use_ants = False
if cfg.switch_is_on(
[
"functional_preproc",
diff --git a/CPAC/nuisance/tests/regressors.1D b/CPAC/nuisance/tests/regressors.1D
new file mode 100644
index 0000000000..d55945bd4e
--- /dev/null
+++ b/CPAC/nuisance/tests/regressors.1D
@@ -0,0 +1,15 @@
+# Extra header
+# extra header
+# C-PAC 1.8.7.dev1
+# Nuisance regressors:
+# RotY RotYDelay RotYSq RotYDelaySq RotX RotXDelay RotXSq RotXDelaySq RotZ RotZDelay RotZSq RotZDelaySq Y YDelay YSq YDelaySq X XDelay XSq XDelaySq Z ZDelay ZSq ZDelaySq aCompCorDetrendPC0 aCompCorDetrendPC1 aCompCorDetrendPC2 aCompCorDetrendPC3 aCompCorDetrendPC4
+0.064503015618032941 0.000000000000000000 0.004160639023820202 0.000000000000000000 0.071612848897811346 0.000000000000000000 0.005128400127260760 0.000000000000000000 -0.045875642036314265 0.000000000000000000 0.002104574532244045 0.000000000000000000 0.132890000000000008 0.000000000000000000 0.017659752100000002 0.000000000000000000 0.014942199999999999 0.000000000000000000 0.000223269340840000 0.000000000000000000 0.000408556000000000 0.000000000000000000 0.000000166918005136 0.000000000000000000 -0.022348500000000000 0.024816700000000001 -0.096326200000000001 0.157762999999999987 -0.097873799999999997
+0.031640849390966043 0.064503015618032941 0.001001143350181796 0.004160639023820202 0.128928108975928074 0.071612848897811346 0.016622457284108785 0.005128400127260760 -0.067560891370646151 -0.045875642036314265 0.004564474042796250 0.002104574532244045 0.031627599999999999 0.132890000000000008 0.001000305081760000 0.017659752100000002 0.038095700000000003 0.014942199999999999 0.001451282358490000 0.000223269340840000 -0.005307810000000000 0.000408556000000000 0.000028172846996100 0.000000166918005136 -0.064876000000000003 -0.013603499999999999 0.009020350000000000 -0.160142000000000007 -0.177807999999999994
+0.014566182605406878 0.011659654350684051 0.001782025477654622 0.001708282485349496 0.087538262826358210 0.084814056613328720 0.003050410763897181 0.001983217145137512 -0.041453889502682612 -0.041248724781196566 0.000887045295189055 0.001102853114798172 0.024593061637466357 0.019123515563283400 -0.001171834437865083 -0.001702326740091272 0.013267008686230538 0.014908354440170480 -0.000023048542269668 0.000030800663864303 -0.003147503026503373 -0.002156489951271478 -0.000212523379574746 -0.000134571632225604 -0.005279020489008680 0.003309414394962159 0.006218425399968431 0.006926438427946187 0.031874911370701621
+0.023012917432044880 0.023641462459337223 0.001353826869739763 0.001428748088263631 0.128401517423642225 0.127907328597750475 0.002936077845255422 0.001732591121410621 -0.064041009402203836 -0.065349619535801984 -0.001376339705694537 -0.000867347717315630 0.055371528230890282 0.047838664356472604 -0.003939704578469714 -0.004413819725322955 0.008626921921677059 0.013521224060128565 -0.000524131399781458 -0.000509996162422567 0.001399646015426790 0.002426771079716165 -0.000697817034458711 -0.000644064148730770 0.003453684797811343 0.004439728633043883 0.005528130051255496 -0.000681564743845684 0.027088427450170843
+0.025438893846313822 0.030058212923250879 0.000838561693597976 0.001085005134557843 0.158696217127646116 0.160188595362451003 0.002834979654468744 0.001871305030454243 -0.079495085073931035 -0.083080090516398086 -0.003568788021910289 -0.002826331376429190 0.082500133838064399 0.073831252771084988 -0.006214900864815498 -0.006543763203955914 0.000519334243296480 0.008630341137520037 -0.000923363158038725 -0.000927750776503564 0.005165821347348335 0.005851034226762506 -0.001054704872395450 -0.001043041584010332 0.012752740283469200 0.004786640061712925 0.012289830660907162 -0.008745532683606035 0.014261415118720363
+0.021743016120035281 0.029688950895877426 0.000290547599874028 0.000682055571198300 0.175549364989970313 0.178338230890874111 0.002486643991830800 0.002149192970833630 -0.085377454115175486 -0.091489126463240492 -0.005383312549059558 -0.004535185285883645 0.102288251365551003 0.094066918293276736 -0.007766221033112258 -0.007876677356441979 -0.010112433374632405 0.000319385240548675 -0.001198648271548705 -0.001193505340585474 0.008037366757553616 0.007980258888708817 -0.001242736103775270 -0.001273598198058523 0.020974706057590668 0.005751802778007228 0.025351389814577394 -0.017180756363741379 -0.003956879522184370
+0.013525050094767123 0.023039913400015079 -0.000213791695822321 0.000249432472712464 0.178794499964418374 0.182090614749512603 0.001668344371008412 0.002226367140418777 -0.081444170893389012 -0.089634493861210238 -0.006575553895215308 -0.005785817468059847 0.112188805335497160 0.106323654207989879 -0.008527087208204130 -0.008379970470761666 -0.021551792557092900 -0.010410526495855658 -0.001350988613004632 -0.001312369367927021 0.010150399365352503 0.009047754995696919 -0.001267065761949068 -0.001322015050183638 0.027086406796860162 0.008769045224622980 0.041260717228531141 -0.025783341088905919 -0.023130294003556602
+0.003710293144471088 0.012303012925884141 -0.000591683949645386 -0.000159272972606234 0.170628799324984620 0.173931286958495634 0.000283113801796188 0.001792439708046661 -0.069705778794223461 -0.078851018906234180 -0.007027047515815758 -0.006451875040969878 0.111350260801486828 0.109587738981351920 -0.008599721245876775 -0.008202102875302755 -0.031732073497397532 -0.021834007346710128 -0.001401093147591972 -0.001318145135788918 0.011803916636990694 0.009558331079300939 -0.001174308952196117 -0.001222014617445004 0.030766413414606002 0.014584179038094797 0.055050504861566943 -0.034070573320800129 -0.038211308729750156
+-0.004234132549489960 0.000832649040004215 -0.000775011343076728 -0.000476609472781693 0.154706450373287646 0.158080166174354941 -0.001594467727120446 0.000677513943272737 -0.053855672812893871 -0.062403402297765788 -0.006775880525707196 -0.006520249171448194 0.100795197589240160 0.104212055737311265 -0.008213823778438519 -0.007620791647640451 -0.038786879957853938 -0.031921725342639970 -0.001382244593213621 -0.001261079549196342 0.013326020461721926 0.010106105453167591 -0.001035584634549134 -0.001043215412680393 0.032184969062050033 0.022685626981519318 0.061584251842384724 -0.041209431181336478 -0.044960991839340970
+-0.007277482359979987 -0.007732524875134966 -0.000730772864804432 -0.000639221128232586 0.134982773383533428 0.139112331565989317 -0.003731084894624379 -0.001083447029356628 -0.038226479638264539 -0.044608858813448345 -0.006004861339980094 -0.006093216205261694 0.083174466753920082 0.091804401122952045 -0.007653794404152313 -0.006957769820022970 -0.041563433510437883 -0.038870841259792399 -0.001331418827716966 -0.001192680917598046 0.014941405278128142 0.011168243606804554 -0.000922586836031674 -0.000868492699062700 0.031582380224112708 0.031133381053542616 0.057080842480777161 -0.046093261482679442 -0.041188612349529960
diff --git a/CPAC/nuisance/tests/test_bandpass.py b/CPAC/nuisance/tests/test_bandpass.py
new file mode 100644
index 0000000000..452b55d3c7
--- /dev/null
+++ b/CPAC/nuisance/tests/test_bandpass.py
@@ -0,0 +1,48 @@
+# Copyright (C) 2022 - 2024 C-PAC Developers
+
+# This file is part of C-PAC.
+
+# C-PAC is free software: you can redistribute it and/or modify it under
+# the terms of the GNU Lesser General Public License as published by the
+# Free Software Foundation, either version 3 of the License, or (at your
+# option) any later version.
+
+# C-PAC is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
+# License for more details.
+
+# You should have received a copy of the GNU Lesser General Public
+# License along with C-PAC. If not, see <https://www.gnu.org/licenses/>.
+"""Tests for bandpass filters."""
+
+from importlib.abc import Traversable
+from importlib.resources import files
+from pathlib import Path
+
+from numpy.typing import NDArray
+import pytest
+
+from CPAC.nuisance.bandpass import read_1D
+
+RAW_ONE_D: Traversable = files("CPAC").joinpath("nuisance/tests/regressors.1D")
+
+
+@pytest.mark.parametrize("start_line", list(range(6)))
+def test_read_1D(start_line: int, tmp_path: Path) -> None:
+ """Test the correct number of rows are read when reading a 1D file."""
+ regressor: Path = tmp_path / f"regressor_startAtL{start_line}.1D"
+ # create a regressor.1D file with (5 - ``start_line``) lines of header
+ with (
+ RAW_ONE_D.open("r", encoding="utf-8") as _raw,
+ regressor.open("w", encoding="utf-8") as _test_file,
+ ):
+ for line in _raw.readlines()[start_line:]:
+ _test_file.write(line)
+ header: list[str]
+ data: NDArray
+ header, data = read_1D(regressor)
+ # should get the same array no matter how many lines of header
+ assert data.shape == (10, 29)
+ # all header lines should be captured
+ assert len(header) == 5 - start_line
diff --git a/CPAC/nuisance/tests/test_utils.py b/CPAC/nuisance/tests/test_utils.py
index 724d536b63..be0ea03e96 100644
--- a/CPAC/nuisance/tests/test_utils.py
+++ b/CPAC/nuisance/tests/test_utils.py
@@ -1,4 +1,3 @@
-from logging import basicConfig, INFO
import os
import tempfile
@@ -10,7 +9,6 @@
from CPAC.utils.monitoring.custom_logging import getLogger
logger = getLogger("CPAC.nuisance.tests")
-basicConfig(format="%(message)s", level=INFO)
mocked_outputs = p.resource_filename(
"CPAC", os.path.join("nuisance", "tests", "motion_statistics")
diff --git a/CPAC/nuisance/utils/compcor.py b/CPAC/nuisance/utils/compcor.py
index 9de8e3a918..8d17de23d1 100644
--- a/CPAC/nuisance/utils/compcor.py
+++ b/CPAC/nuisance/utils/compcor.py
@@ -91,18 +91,33 @@ def cosine_filter(
failure_mode="error",
):
"""
- `cosine_filter` adapted from Nipype.
+ Apply cosine filter to the input BOLD image using the discrete cosine transform (DCT) method.
+
+ Adapted from nipype implementation. https://github.com/nipy/nipype/blob/d353f0d/nipype/algorithms/confounds.py#L1086-L1107
+ It removes the low-frequency drift from the voxel time series. The filtered image is saved to disk.
- https://github.com/nipy/nipype/blob/d353f0d/nipype/algorithms/confounds.py#L1086-L1107
Parameters
----------
- input_image_path : string
- Bold image to be filtered.
+ input_image_path : str
+ Path to the BOLD image to be filtered.
timestep : float
- 'Repetition time (TR) of series (in sec) - derived from image header if unspecified'
- period_cut : float
- Minimum period (in sec) for DCT high-pass filter, nipype default value: 128.
+ Repetition time (TR) of the series (in seconds). Derived from image header if unspecified.
+ period_cut : float, optional
+ Minimum period (in seconds) for the DCT high-pass filter. Default value is 128.
+ remove_mean : bool, optional
+ Whether to remove the mean from the voxel time series before filtering. Default is True.
+ axis : int, optional
+ The axis along which to apply the filter. Default is -1 (last axis).
+ failure_mode : {'error', 'ignore'}, optional
+ Specifies how to handle failure modes. If set to 'error', the function raises an error.
+ If set to 'ignore', it returns the input data unchanged in case of failure. Default is 'error'.
+
+ Returns
+ -------
+ cosfiltered_img : str
+ Path to the filtered BOLD image.
+
"""
# STATEMENT OF CHANGES:
# This function is derived from sources licensed under the Apache-2.0 terms,
@@ -113,6 +128,7 @@ def cosine_filter(
# * Removed caluclation and return of `non_constant_regressors`
# * Modified docstring to reflect local changes
# * Updated style to match C-PAC codebase
+    # * Updated to use a generator and iterate over voxel time series to optimize memory usage.
# ORIGINAL WORK'S ATTRIBUTION NOTICE:
# Copyright (c) 2009-2016, Nipype developers
@@ -132,41 +148,74 @@ def cosine_filter(
# Prior to release 0.12, Nipype was licensed under a BSD license.
# Modifications copyright (C) 2019 - 2024 C-PAC Developers
- from nipype.algorithms.confounds import _cosine_drift, _full_rank
+ try:
- input_img = nib.load(input_image_path)
- input_data = input_img.get_fdata()
+ def voxel_generator():
+ for i in range(datashape[0]):
+ for j in range(datashape[1]):
+ for k in range(datashape[2]):
+ yield input_data[i, j, k, :]
- datashape = input_data.shape
- timepoints = datashape[axis]
- if datashape[0] == 0 and failure_mode != "error":
- return input_data, np.array([])
+ from nipype.algorithms.confounds import _cosine_drift, _full_rank
- input_data = input_data.reshape((-1, timepoints))
+ input_img = nib.load(input_image_path)
+ input_data = input_img.get_fdata()
+ datashape = input_data.shape
+ timepoints = datashape[axis]
+ if datashape[0] == 0 and failure_mode != "error":
+ return input_data, np.array([])
- frametimes = timestep * np.arange(timepoints)
- X = _full_rank(_cosine_drift(period_cut, frametimes))[0]
+ frametimes = timestep * np.arange(timepoints)
+ X_full = _full_rank(_cosine_drift(period_cut, frametimes))[0]
- betas = np.linalg.lstsq(X, input_data.T)[0]
+ # Generate X with and without the mean column
+ X_with_mean = X_full
+ X_without_mean = X_full[:, :-1] if X_full.shape[1] > 1 else X_full
- if not remove_mean:
- X = X[:, :-1]
- betas = betas[:-1]
+ # Reshape the input data to bring the time dimension to the last axis if it's not already
+ if axis != -1:
+ reshaped_data = np.moveaxis(input_data, axis, -1)
+ else:
+ reshaped_data = input_data
+
+ reshaped_output_data = np.zeros_like(reshaped_data)
+
+ # Choose the appropriate X matrix
+ X = X_without_mean if remove_mean else X_with_mean
- residuals = input_data - X.dot(betas).T
+ voxel_gen = voxel_generator()
- output_data = residuals.reshape(datashape)
+ for i in range(reshaped_data.shape[0]):
+ IFLOGGER.info(
+                f"calculating row {i + 1} of {reshaped_data.shape[0]} rows of voxels"
+ )
+ for j in range(reshaped_data.shape[1]):
+ for k in range(reshaped_data.shape[2]):
+ voxel_time_series = next(voxel_gen)
+ betas = np.linalg.lstsq(X, voxel_time_series.T, rcond=None)[0]
+
+ residuals = voxel_time_series - X.dot(betas)
+ reshaped_output_data[i, j, k, :] = residuals
+
+ # Move the time dimension back to its original position if it was reshaped
+ if axis != -1:
+ output_data = np.moveaxis(reshaped_output_data, -1, axis)
+ else:
+ output_data = reshaped_output_data
- hdr = input_img.header
- output_img = nib.Nifti1Image(output_data, header=hdr, affine=input_img.affine)
+ hdr = input_img.header
+ output_img = nib.Nifti1Image(output_data, header=hdr, affine=input_img.affine)
+ file_name = input_image_path[input_image_path.rindex("/") + 1 :]
- file_name = input_image_path[input_image_path.rindex("/") + 1 :]
+ cosfiltered_img = os.path.join(os.getcwd(), file_name)
- cosfiltered_img = os.path.join(os.getcwd(), file_name)
+ output_img.to_filename(cosfiltered_img)
- output_img.to_filename(cosfiltered_img)
+ return cosfiltered_img
- return cosfiltered_img
+    except Exception as e:
+        message = f"Error in cosine_filter: {e}"
+        IFLOGGER.error(message)
+        raise
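+
+# Per-voxel core of the filter above, for illustration (``ts`` stands in for a
+# hypothetical 1-D voxel time series of length ``timepoints``):
+#
+#     frametimes = timestep * np.arange(timepoints)
+#     X = _full_rank(_cosine_drift(period_cut, frametimes))[0]  # DCT drift basis
+#     betas = np.linalg.lstsq(X, ts, rcond=None)[0]             # fit drift terms
+#     filtered = ts - X.dot(betas)                              # remove the drift
+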
def fallback_svd(a, full_matrices=True, compute_uv=True):
diff --git a/CPAC/nuisance/utils/utils.py b/CPAC/nuisance/utils/utils.py
index db6667dcb3..9067b72ab6 100644
--- a/CPAC/nuisance/utils/utils.py
+++ b/CPAC/nuisance/utils/utils.py
@@ -139,7 +139,7 @@ def find_offending_time_points(
censor_vector[extended_censors] = 0
out_file_path = os.path.join(os.getcwd(), "censors.tsv")
- np.savetxt(out_file_path, censor_vector, fmt="%d", header="censor", comments="")
+ np.savetxt(out_file_path, censor_vector, fmt="%d", comments="")
return out_file_path
diff --git a/CPAC/pipeline/check_outputs.py b/CPAC/pipeline/check_outputs.py
index 2e55ef560d..7db2349337 100644
--- a/CPAC/pipeline/check_outputs.py
+++ b/CPAC/pipeline/check_outputs.py
@@ -59,7 +59,11 @@ def check_outputs(output_dir: str, log_dir: str, pipe_name: str, unique_id: str)
if isinstance(outputs_logger, (Logger, MockLogger)) and len(
outputs_logger.handlers
):
- outputs_log = getattr(outputs_logger.handlers[0], "baseFilename", None)
+ outputs_log = getattr(
+ MockLogger._get_first_file_handler(outputs_logger.handlers),
+ "baseFilename",
+ None,
+ )
else:
outputs_log = None
if outputs_log is None:
@@ -103,7 +107,7 @@ def check_outputs(output_dir: str, log_dir: str, pipe_name: str, unique_id: str)
try:
log_note = (
"Missing outputs have been logged in "
- f"{missing_log.handlers[0].baseFilename}"
+ f"{MockLogger._get_first_file_handler(missing_log.handlers).baseFilename}"
)
except (AttributeError, IndexError):
log_note = ""
diff --git a/CPAC/pipeline/cpac_group_runner.py b/CPAC/pipeline/cpac_group_runner.py
index 57d5cc80dc..acc594adaf 100644
--- a/CPAC/pipeline/cpac_group_runner.py
+++ b/CPAC/pipeline/cpac_group_runner.py
@@ -1,4 +1,4 @@
-# Copyright (C) 2022-2024 C-PAC Developers
+# Copyright (C) 2022-2025 C-PAC Developers
# This file is part of C-PAC.
@@ -143,31 +143,12 @@ def gather_nifti_globs(pipeline_output_folder, resource_list, pull_func=False):
import glob
import os
- import pandas as pd
- import pkg_resources as p
+ from CPAC.utils.outputs import group_derivatives
exts = ".nii"
nifti_globs = []
- keys_tsv = p.resource_filename("CPAC", "resources/cpac_outputs.tsv")
- try:
- keys = pd.read_csv(keys_tsv, delimiter="\t")
- except Exception as e:
- err = (
- "\n[!] Could not access or read the cpac_outputs.tsv "
- f"resource file:\n{keys_tsv}\n\nError details {e}\n"
- )
- raise Exception(err)
-
- derivative_list = list(keys[keys["Sub-Directory"] == "func"]["Resource"])
- derivative_list = derivative_list + list(
- keys[keys["Sub-Directory"] == "anat"]["Resource"]
- )
-
- if pull_func:
- derivative_list = derivative_list + list(
- keys[keys["Space"] == "functional"]["Resource"]
- )
+ derivative_list = group_derivatives(pull_func)
if len(resource_list) == 0:
err = "\n\n[!] No derivatives selected!\n\n"
@@ -361,33 +342,14 @@ def create_output_dict_list(
"""Create a dictionary of output filepaths and their associated information."""
import os
- import pandas as pd
- import pkg_resources as p
-
if len(resource_list) == 0:
err = "\n\n[!] No derivatives selected!\n\n"
raise Exception(err)
if derivatives is None:
- keys_tsv = p.resource_filename("CPAC", "resources/cpac_outputs.tsv")
- try:
- keys = pd.read_csv(keys_tsv, delimiter="\t")
- except Exception as e:
- err = (
- "\n[!] Could not access or read the cpac_outputs.csv "
- f"resource file:\n{keys_tsv}\n\nError details {e}\n"
- )
- raise Exception(err)
+ from CPAC.utils.outputs import group_derivatives
- derivatives = list(keys[keys["Sub-Directory"] == "func"]["Resource"])
- derivatives = derivatives + list(
- keys[keys["Sub-Directory"] == "anat"]["Resource"]
- )
-
- if pull_func:
- derivatives = derivatives + list(
- keys[keys["Space"] == "functional"]["Resource"]
- )
+ derivatives = group_derivatives(pull_func)
# remove any extra /'s
pipeline_output_folder = pipeline_output_folder.rstrip("/")
@@ -752,18 +714,10 @@ def prep_feat_inputs(group_config_file: str) -> dict:
import os
import pandas as pd
- import pkg_resources as p
- keys_tsv = p.resource_filename("CPAC", "resources/cpac_outputs.tsv")
- try:
- keys = pd.read_csv(keys_tsv, delimiter="\t")
- except Exception as e:
- err = (
- "\n[!] Could not access or read the cpac_outputs.tsv "
- f"resource file:\n{keys_tsv}\n\nError details {e}\n"
- )
- raise Exception(err)
+ from CPAC.utils.outputs import Outputs
+ keys = Outputs.reference
derivatives = list(
keys[keys["Derivative"] == "yes"][keys["Space"] == "template"][
keys["Values"] == "z-score"
diff --git a/CPAC/pipeline/cpac_pipeline.py b/CPAC/pipeline/cpac_pipeline.py
index 40811b9e77..1b64b286a8 100644
--- a/CPAC/pipeline/cpac_pipeline.py
+++ b/CPAC/pipeline/cpac_pipeline.py
@@ -148,6 +148,7 @@
coregistration_prep_vol,
create_func_to_T1template_symmetric_xfm,
create_func_to_T1template_xfm,
+ mask_sbref,
overwrite_transform_anat_to_template,
register_ANTs_anat_to_template,
register_ANTs_EPI_to_template,
@@ -710,21 +711,24 @@ def run_workflow(
]
timeHeader = dict(zip(gpaTimeFields, gpaTimeFields))
- with open(
- os.path.join(
- c.pipeline_setup["log_directory"]["path"],
- "cpac_individual_timing"
- f"_{c.pipeline_setup['pipeline_name']}.csv",
- ),
- "a",
- ) as timeCSV, open(
- os.path.join(
- c.pipeline_setup["log_directory"]["path"],
- "cpac_individual_timing_%s.csv"
- % c.pipeline_setup["pipeline_name"],
- ),
- "r",
- ) as readTimeCSV:
+ with (
+ open(
+ os.path.join(
+ c.pipeline_setup["log_directory"]["path"],
+ "cpac_individual_timing"
+ f"_{c.pipeline_setup['pipeline_name']}.csv",
+ ),
+ "a",
+ ) as timeCSV,
+ open(
+ os.path.join(
+ c.pipeline_setup["log_directory"]["path"],
+ "cpac_individual_timing_%s.csv"
+ % c.pipeline_setup["pipeline_name"],
+ ),
+ "r",
+ ) as readTimeCSV,
+ ):
timeWriter = csv.DictWriter(timeCSV, fieldnames=gpaTimeFields)
timeReader = csv.DictReader(readTimeCSV)
@@ -1285,6 +1289,7 @@ def build_workflow(subject_id, sub_dict, cfg, pipeline_name=None):
coregistration_prep_mean,
coregistration_prep_fmriprep,
],
+ mask_sbref,
]
# Distortion/Susceptibility Correction
diff --git a/CPAC/pipeline/cpac_runner.py b/CPAC/pipeline/cpac_runner.py
index 0110281d5d..425eefb91f 100644
--- a/CPAC/pipeline/cpac_runner.py
+++ b/CPAC/pipeline/cpac_runner.py
@@ -14,6 +14,8 @@
# You should have received a copy of the GNU Lesser General Public
# License along with C-PAC. If not, see <https://www.gnu.org/licenses/>.
+"""Run C-PAC pipeline as configured."""
+
from multiprocessing import Process
import os
from time import strftime
@@ -23,23 +25,24 @@
import yaml
from CPAC.longitudinal_pipeline.longitudinal_workflow import anat_longitudinal_wf
+from CPAC.pipeline.utils import get_shell
from CPAC.utils.configuration import check_pname, Configuration, set_subject
from CPAC.utils.configuration.yaml_template import upgrade_pipeline_to_1_8
from CPAC.utils.ga import track_run
-from CPAC.utils.monitoring import failed_to_start, log_nodes_cb, WFLOGGER
+from CPAC.utils.io import load_yaml
+from CPAC.utils.monitoring import failed_to_start, FMLOGGER, log_nodes_cb, WFLOGGER
+
+RECOMMENDED_MAX_PATH_LENGTH: int = 70
+"""Recommended maximum length for a working directory path."""
-# Run condor jobs
def run_condor_jobs(c, config_file, subject_list_file, p_name):
+ """Run condor jobs."""
# Import packages
import subprocess
from time import strftime
- try:
- sublist = yaml.safe_load(open(os.path.realpath(subject_list_file), "r"))
- except:
- msg = "Subject list is not in proper YAML format. Please check your file"
- raise Exception(msg)
+ sublist = load_yaml(subject_list_file, "Subject list")
cluster_files_dir = os.path.join(os.getcwd(), "cluster_files")
subject_bash_file = os.path.join(
@@ -100,9 +103,9 @@ def run_condor_jobs(c, config_file, subject_list_file, p_name):
# Create and run script for CPAC to run on cluster
def run_cpac_on_cluster(config_file, subject_list_file, cluster_files_dir):
- """
- Function to build a SLURM batch job submission script and
- submit it to the scheduler via 'sbatch'.
+ """Build a SLURM batch job submission script.
+
+ Submit it to the scheduler via 'sbatch'.
"""
# Import packages
import getpass
@@ -113,18 +116,11 @@ def run_cpac_on_cluster(config_file, subject_list_file, cluster_files_dir):
from indi_schedulers import cluster_templates
# Load in pipeline config
- try:
- pipeline_dict = yaml.safe_load(open(os.path.realpath(config_file), "r"))
- pipeline_config = Configuration(pipeline_dict)
- except:
- msg = "Pipeline config is not in proper YAML format. Please check your file"
- raise Exception(msg)
+ pipeline_dict = load_yaml(config_file, "Pipeline config")
+ pipeline_config = Configuration(pipeline_dict)
+
# Load in the subject list
- try:
- sublist = yaml.safe_load(open(os.path.realpath(subject_list_file), "r"))
- except:
- msg = "Subject list is not in proper YAML format. Please check your file"
- raise Exception(msg)
+ sublist = load_yaml(subject_list_file, "Subject list")
# Init variables
timestamp = str(strftime("%Y_%m_%d_%H_%M_%S"))
@@ -137,7 +133,6 @@ def run_cpac_on_cluster(config_file, subject_list_file, cluster_files_dir):
time_limit = "%d:00:00" % hrs_limit
# Batch file variables
- shell = subprocess.getoutput("echo $SHELL")
user_account = getpass.getuser()
num_subs = len(sublist)
@@ -174,7 +169,7 @@ def run_cpac_on_cluster(config_file, subject_list_file, cluster_files_dir):
# Set up config dictionary
config_dict = {
"timestamp": timestamp,
- "shell": shell,
+ "shell": get_shell(),
"job_name": "CPAC_" + pipeline_config.pipeline_setup["pipeline_name"],
"num_tasks": num_subs,
"queue": pipeline_config.pipeline_setup["system_config"]["on_grid"]["SGE"][
@@ -238,6 +233,7 @@ def run_cpac_on_cluster(config_file, subject_list_file, cluster_files_dir):
def run_T1w_longitudinal(sublist, cfg):
+ """Run anatomical longitudinal pipeline."""
subject_id_dict = {}
for sub in sublist:
@@ -260,7 +256,7 @@ def run_T1w_longitudinal(sublist, cfg):
)
-def run(
+def run( # noqa: PLR0915
subject_list_file,
config_file=None,
p_name=None,
@@ -322,22 +318,21 @@ def run(
config_file = os.path.realpath(config_file)
try:
if not os.path.exists(config_file):
- raise IOError
- else:
+ raise FileNotFoundError(config_file)
+ try:
+ c = Configuration(load_yaml(config_file, "Pipeline configuration"))
+ except Invalid:
try:
- c = Configuration(yaml.safe_load(open(config_file, "r")))
- except Invalid:
- try:
- upgrade_pipeline_to_1_8(config_file)
- c = Configuration(yaml.safe_load(open(config_file, "r")))
- except Exception as e:
- msg = (
- "C-PAC could not upgrade pipeline configuration file "
- f"{config_file} to v1.8 syntax"
- )
- raise RuntimeError(msg) from e
+ upgrade_pipeline_to_1_8(config_file)
+ c = Configuration(load_yaml(config_file, "Pipeline configuration"))
except Exception as e:
- raise e
+ msg = (
+ "C-PAC could not upgrade pipeline configuration file "
+ f"{config_file} to v1.8 syntax"
+ )
+ raise RuntimeError(msg) from e
+ except Exception as e:
+ raise e
except IOError as e:
msg = f"config file {config_file} doesn't exist"
raise FileNotFoundError(msg) from e
@@ -385,10 +380,10 @@ def run(
msg = "Working directory not specified"
raise Exception(msg)
- if len(c.pipeline_setup["working_directory"]["path"]) > 70:
+ if len(c.pipeline_setup["working_directory"]["path"]) > RECOMMENDED_MAX_PATH_LENGTH:
warnings.warn(
"We recommend that the working directory full path "
- "should have less then 70 characters. "
+ f"should have less then {RECOMMENDED_MAX_PATH_LENGTH} characters. "
"Long paths might not work in your operating system."
)
warnings.warn(
@@ -400,12 +395,8 @@ def run(
p_name = check_pname(p_name, c)
# Load in subject list
- try:
- if not sublist:
- sublist = yaml.safe_load(open(subject_list_file, "r"))
- except:
- msg = "Subject list is not in proper YAML format. Please check your file"
- raise FileNotFoundError(msg)
+ if not sublist:
+ sublist = load_yaml(subject_list_file, "Subject list")
# Populate subject scan map
sub_scan_map = {}
@@ -418,12 +409,12 @@ def run(
scan_ids = ["scan_anat"]
if "func" in sub:
- for id in sub["func"]:
- scan_ids.append("scan_" + str(id))
+ for _id in sub["func"]:
+ scan_ids.append("scan_" + str(_id))
if "rest" in sub:
- for id in sub["rest"]:
- scan_ids.append("scan_" + str(id))
+ for _id in sub["rest"]:
+ scan_ids.append("scan_" + str(_id))
sub_scan_map[s] = scan_ids
except Exception as e:
@@ -444,8 +435,10 @@ def run(
level="participant" if not test_config else "test",
participants=len(sublist),
)
- except:
- WFLOGGER.error("Usage tracking failed for this run.")
+ except Exception as exception:
+ WFLOGGER.error(
+ "Usage tracking failed for this run.\nDetails: %s", exception
+ )
# If we're running on cluster, execute job scheduler
if c.pipeline_setup["system_config"]["on_grid"]["run"]:
@@ -471,15 +464,20 @@ def run(
# Create working dir
if not os.path.exists(c.pipeline_setup["working_directory"]["path"]):
try:
- os.makedirs(c.pipeline_setup["working_directory"]["path"])
- except:
+ os.makedirs(
+ c.pipeline_setup["working_directory"]["path"], exist_ok=True
+ )
+ except FileExistsError:
+ FMLOGGER.warn(
+ f"Path exists: {c['pipeline_setup', 'working_directory', 'path']}"
+ )
+ except Exception as exception:
err = (
- "\n\n[!] CPAC says: Could not create the working "
- "directory: %s\n\nMake sure you have permissions "
- "to write to this directory.\n\n"
- % c.pipeline_setup["working_directory"]["path"]
+ "\n\n[!] CPAC says: Could not create the working directory: "
+ f"{c['pipeline_setup', 'working_directory', 'path']}\n\nMake sure "
+ "you have permissions to write to this directory.\n\n"
)
- raise Exception(err)
+ raise IOError(err) from exception
"""
if not os.path.exists(c.pipeline_setup['log_directory']['path']):
try:
diff --git a/CPAC/pipeline/engine.py b/CPAC/pipeline/engine.py
index d7f53f7029..11f5965e8e 100644
--- a/CPAC/pipeline/engine.py
+++ b/CPAC/pipeline/engine.py
@@ -1,4 +1,4 @@
-# Copyright (C) 2021-2024 C-PAC Developers
+# Copyright (C) 2021-2025 C-PAC Developers
# This file is part of C-PAC.
@@ -17,6 +17,7 @@
import ast
import copy
import hashlib
+from importlib.resources import files
from itertools import chain
import json
import os
@@ -24,9 +25,12 @@
from typing import Optional
import warnings
+import pandas as pd
from nipype import config, logging
+from nipype.interfaces import afni
from nipype.interfaces.utility import Rename
+from CPAC.anat_preproc.utils import mri_convert_reorient
from CPAC.image_utils.spatial_smoothing import spatial_smoothing
from CPAC.image_utils.statistical_transforms import (
fisher_z_score_standardize,
@@ -35,7 +39,11 @@
from CPAC.pipeline import nipype_pipeline_engine as pe
from CPAC.pipeline.check_outputs import ExpectedOutputs
from CPAC.pipeline.nodeblock import NodeBlockFunction
-from CPAC.pipeline.utils import MOVEMENT_FILTER_KEYS, name_fork, source_set
+from CPAC.pipeline.utils import (
+ MOVEMENT_FILTER_KEYS,
+ name_fork,
+ source_set,
+)
from CPAC.registration.registration import transform_derivative
from CPAC.resources.templates.lookup_table import lookup_identifier
from CPAC.utils.bids_utils import res_in_filename
@@ -59,6 +67,7 @@
from CPAC.utils.utils import (
check_prov_for_regtool,
create_id_string,
+ flip_orientation_code,
get_last_prov_entry,
read_json,
write_output_json,
@@ -413,10 +422,12 @@ def get(
if report_fetched:
return (None, None)
return None
+ from CPAC.pipeline.resource_inventory import where_to_find
+
msg = (
"\n\n[!] C-PAC says: None of the listed resources are in "
- f"the resource pool:\n\n {resource}\n\nOptions:\n- You "
- "can enable a node block earlier in the pipeline which "
+ f"the resource pool:\n\n {where_to_find(resource)}\n\nOptions:\n"
+ "- You can enable a node block earlier in the pipeline which "
"produces these resources. Check the 'outputs:' field in "
"a node block's documentation.\n- You can directly "
"provide this required data by pulling it from another "
@@ -451,7 +462,9 @@ def copy_resource(self, resource, new_name):
try:
self.rpool[new_name] = self.rpool[resource]
except KeyError:
- msg = f"[!] {resource} not in the resource pool."
+ from CPAC.pipeline.resource_inventory import where_to_find
+
+ msg = f"[!] Not in the resource pool:\n{where_to_find(resource)}"
raise Exception(msg)
def update_resource(self, resource, new_name):
@@ -623,11 +636,13 @@ def get_strats(self, resources, debug=False):
total_pool.append(sub_pool)
if not total_pool:
+ from CPAC.pipeline.resource_inventory import where_to_find
+
raise LookupError(
"\n\n[!] C-PAC says: None of the listed "
"resources in the node block being connected "
"exist in the resource pool.\n\nResources:\n"
- "%s\n\n" % resource_list
+ "%s\n\n" % where_to_find(resource_list)
)
# TODO: right now total_pool is:
@@ -1002,6 +1017,19 @@ def post_process(self, wf, label, connection, json_info, pipe_idx, pipe_x, outs)
for label_con_tpl in post_labels:
label = label_con_tpl[0]
connection = (label_con_tpl[1], label_con_tpl[2])
+ if "desc-" not in label:
+ if "space-template" in label:
+ new_label = label.replace(
+ "space-template", "space-template_desc-zstd"
+ )
+ else:
+ new_label = f"desc-zstd_{label}"
+ else:
+ for tag in label.split("_"):
+ if "desc-" in tag:
+ newtag = f"{tag}-zstd"
+ new_label = label.replace(tag, newtag)
+ break
if label in Outputs.to_zstd:
zstd = z_score_standardize(f"{label}_zstd_{pipe_x}", input_type)
@@ -1010,20 +1038,6 @@ def post_process(self, wf, label, connection, json_info, pipe_idx, pipe_x, outs)
node, out = self.get_data(mask, pipe_idx=mask_idx)
wf.connect(node, out, zstd, "inputspec.mask")
- if "desc-" not in label:
- if "space-template" in label:
- new_label = label.replace(
- "space-template", "space-template_desc-zstd"
- )
- else:
- new_label = f"desc-zstd_{label}"
- else:
- for tag in label.split("_"):
- if "desc-" in tag:
- newtag = f"{tag}-zstd"
- new_label = label.replace(tag, newtag)
- break
-
post_labels.append((new_label, zstd, "outputspec.out_file"))
self.set_data(
@@ -1183,7 +1197,7 @@ def gather_pipes(self, wf, cfg, all=False, add_incl=None, add_excl=None):
key
for json_info in all_jsons
for key in json_info.get("CpacVariant", {}).keys()
- if key not in (*MOVEMENT_FILTER_KEYS, "regressors")
+ if key not in (*MOVEMENT_FILTER_KEYS, "timeseries")
}
if "bold" in unlabelled:
all_bolds = list(
@@ -1352,6 +1366,9 @@ def gather_pipes(self, wf, cfg, all=False, add_incl=None, add_excl=None):
wf.connect(id_string, "out_filename", nii_name, "format_string")
node, out = self.rpool[resource][pipe_idx]["data"]
+ if not node:
+ msg = f"Resource {resource} not found in resource pool."
+ raise FileNotFoundError(msg)
try:
wf.connect(node, out, nii_name, "in_file")
except OSError as os_error:
@@ -2028,9 +2045,30 @@ def ingress_freesurfer(wf, rpool, cfg, data_paths, unique_id, part_id, ses_id):
creds_path=data_paths["creds_path"],
dl_dir=cfg.pipeline_setup["working_directory"]["path"],
)
- rpool.set_data(
- key, fs_ingress, "outputspec.data", {}, "", f"fs_{key}_ingress"
- )
+ # reorient *.mgz
+ if outfile.endswith(".mgz"):
+ reorient_mgz = pe.Node(
+ Function(
+ input_names=["in_file", "orientation", "out_file"],
+ output_names=["out_file"],
+ function=mri_convert_reorient,
+ ),
+ name=f"reorient_mgz_{key}",
+ )
+ # Flip orientation before reorient because mri_convert's orientation is opposite that of AFNI
+ reorient_mgz.inputs.orientation = flip_orientation_code(
+ cfg.pipeline_setup["desired_orientation"]
+ )
+ reorient_mgz.inputs.out_file = None
+ wf.connect(fs_ingress, "outputspec.data", reorient_mgz, "in_file")
+
+ rpool.set_data(
+ key, reorient_mgz, "out_file", {}, "", f"fs_{key}_ingress"
+ )
+ else:
+ rpool.set_data(
+ key, fs_ingress, "outputspec.data", {}, "", f"fs_{key}_ingress"
+ )
else:
warnings.warn(
str(LookupError(f"\n[!] Path does not exist for {fullpath}.\n"))
@@ -2403,15 +2441,18 @@ def strip_template(data_label, dir_path, filename):
return data_label, json
-def ingress_pipeconfig_paths(cfg, rpool, unique_id, creds_path=None):
+def template_dataframe() -> pd.DataFrame:
+ """Return the template dataframe."""
+ template_csv = files("CPAC").joinpath("resources/cpac_templates.csv")
+ return pd.read_csv(str(template_csv), keep_default_na=False)
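+
+
+# For reference (an illustrative sketch, not executed here): the returned frame is
+# the packaged cpac_templates.csv table, so callers can iterate it directly, e.g.
+#     for row in template_dataframe().itertuples():
+#         key = row.Key  # as ingress_pipeconfig_paths() does below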
+
+
+def ingress_pipeconfig_paths(wf, cfg, rpool, unique_id, creds_path=None):
# ingress config file paths
# TODO: may want to change the resource keys for each to include one level up in the YAML as well
- import pandas as pd
- import pkg_resources as p
-
- template_csv = p.resource_filename("CPAC", "resources/cpac_templates.csv")
- template_df = pd.read_csv(template_csv, keep_default_na=False)
+ template_df = template_dataframe()
+ desired_orientation = cfg.pipeline_setup["desired_orientation"]
for row in template_df.itertuples():
key = row.Key
@@ -2468,7 +2509,13 @@ def ingress_pipeconfig_paths(cfg, rpool, unique_id, creds_path=None):
resampled_template = pe.Node(
Function(
- input_names=["resolution", "template", "template_name", "tag"],
+ input_names=[
+ "orientation",
+ "resolution",
+ "template",
+ "template_name",
+ "tag",
+ ],
output_names=["resampled_template"],
function=resolve_resolution,
as_module=True,
@@ -2476,24 +2523,15 @@ def ingress_pipeconfig_paths(cfg, rpool, unique_id, creds_path=None):
name="resampled_" + key,
)
+ resampled_template.inputs.orientation = desired_orientation
resampled_template.inputs.resolution = resolution
resampled_template.inputs.template = val
resampled_template.inputs.template_name = key
resampled_template.inputs.tag = tag
- # the set_data below is set up a little differently, because we are
- # injecting and also over-writing already-existing entries
- # other alternative would have been to ingress into the
- # resampled_template node from the already existing entries, but we
- # didn't do that here
- rpool.set_data(
- key,
- resampled_template,
- "resampled_template",
- json_info,
- "",
- "template_resample",
- ) # pipe_idx (after the blank json {}) should be the previous strat that you want deleted! because you're not connecting this the regular way, you have to do it manually
+ node = resampled_template
+ output = "resampled_template"
+ node_name = "template_resample"
elif val:
config_ingress = create_general_datasource(f"gather_{key}")
@@ -2503,14 +2541,33 @@ def ingress_pipeconfig_paths(cfg, rpool, unique_id, creds_path=None):
creds_path=creds_path,
dl_dir=cfg.pipeline_setup["working_directory"]["path"],
)
- rpool.set_data(
- key,
- config_ingress,
- "outputspec.data",
- json_info,
- "",
- f"{key}_config_ingress",
- )
+ node = config_ingress
+ output = "outputspec.data"
+ node_name = f"{key}_config_ingress"
+
+ if val.endswith((".nii", ".nii.gz")):
+ check_reorient = pe.Node(
+ interface=afni.Resample(),
+ name=f"reorient_{key}",
+ )
+
+ check_reorient.inputs.orientation = desired_orientation
+ check_reorient.inputs.outputtype = "NIFTI_GZ"
+
+ wf.connect(node, output, check_reorient, "in_file")
+ node = check_reorient
+ output = "out_file"
+ node_name = f"{key}_reorient"
+
+ rpool.set_data(
+ key,
+ node,
+ output,
+ json_info,
+ "",
+ node_name,
+ )
+
# templates, resampling from config
"""
template_keys = [
@@ -2596,8 +2653,7 @@ def _set_nested(attr, keys):
)
cfg.set_nested(cfg, key, node)
"""
-
- return rpool
+ return wf, rpool
def initiate_rpool(wf, cfg, data_paths=None, part_id=None):
@@ -2668,7 +2724,7 @@ def initiate_rpool(wf, cfg, data_paths=None, part_id=None):
)
# grab any file paths from the pipeline config YAML
- rpool = ingress_pipeconfig_paths(cfg, rpool, unique_id, creds_path)
+ wf, rpool = ingress_pipeconfig_paths(wf, cfg, rpool, unique_id, creds_path)
# output files with 4 different scans
diff --git a/CPAC/pipeline/resource_inventory.py b/CPAC/pipeline/resource_inventory.py
new file mode 100755
index 0000000000..a181ea6567
--- /dev/null
+++ b/CPAC/pipeline/resource_inventory.py
@@ -0,0 +1,670 @@
+#!/usr/bin/env python
+# Copyright (C) 2025 C-PAC Developers
+
+# This file is part of C-PAC.
+
+# C-PAC is free software: you can redistribute it and/or modify it under
+# the terms of the GNU Lesser General Public License as published by the
+# Free Software Foundation, either version 3 of the License, or (at your
+# option) any later version.
+
+# C-PAC is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
+# License for more details.
+
+# You should have received a copy of the GNU Lesser General Public
+# License along with C-PAC. If not, see <https://www.gnu.org/licenses/>.
+"""Inspect inputs and outputs for NodeBlockFunctions."""
+
+from argparse import ArgumentDefaultsHelpFormatter, ArgumentParser, Namespace
+import ast
+from collections.abc import Hashable
+from dataclasses import dataclass, field
+import importlib
+from importlib.resources import files
+import inspect
+from itertools import chain, product
+import os
+from pathlib import Path
+import re
+from typing import Any, cast, Iterable, Optional
+from unittest.mock import patch
+
+from traits.trait_errors import TraitError
+import yaml
+
+from CPAC.pipeline.engine import template_dataframe
+from CPAC.pipeline.nodeblock import NodeBlockFunction
+from CPAC.pipeline.schema import latest_schema
+from CPAC.utils.monitoring import UTLOGGER
+from CPAC.utils.outputs import Outputs
+
+ONE_OFFS: dict[str, list[str]] = {
+ r".*desc-preproc_bold": ["func_ingress"],
+ r".*-sm.*": [
+ f"spatial_smoothing_{smooth_opt}"
+ for smooth_opt in latest_schema.schema["post_processing"]["spatial_smoothing"][
+ "smoothing_method"
+ ][0].container
+ ],
+ r".*-zstd.*": [f"{fisher}zscore_standardize" for fisher in ["", "fisher_"]],
+}
+"""A few out-of-nodeblock generated resources.
+
+Easier to note these manually than to code up the AST rules."""
+
+SKIPS: list[str] = [
+ "CPAC.unet.__init__",
+ "CPAC.unet._torch",
+]
+"""No nodeblock functions in these modules that dynamically install `torch`."""
+
+
+def import_nodeblock_functions(
+ package_name: str, exclude: Optional[list[str]] = None
+) -> list[NodeBlockFunction]:
+ """
+ Import all functions with the @nodeblock decorator from all modules and submodules in a package.
+
+ Parameters
+ ----------
+ package_name
+ The name of the package to import from.
+
+ exclude
+ A list of module names to exclude from the import.
+ """
+ if exclude is None:
+ exclude = []
+ functions: list[NodeBlockFunction] = []
+ package = importlib.import_module(package_name)
+ package_path = package.__path__[0] # Path to the package directory
+
+ for root, _, package_files in os.walk(package_path):
+ for file in package_files:
+ if file.endswith(".py") and file != "__init__.py":
+ # Get the module path
+ rel_path = os.path.relpath(os.path.join(root, file), package_path)
+ module_name = f"{package_name}.{rel_path[:-3].replace(os.sep, '.')}"
+ if module_name in exclude:
+ continue
+
+ # Import the module
+ try:
+ with patch.dict(
+ "sys.modules", {exclusion: None for exclusion in exclude}
+ ):
+ module = importlib.import_module(module_name)
+ except (ImportError, TraitError, ValueError) as e:
+ UTLOGGER.debug(f"Failed to import {module_name}: {e}")
+ continue
+ # Extract nodeblock-decorated functions from the module
+ for _name, obj in inspect.getmembers(
+ module, predicate=lambda obj: isinstance(obj, NodeBlockFunction)
+ ):
+ functions.append(obj)
+
+ return functions
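+
+
+# Usage sketch (illustrative): gather every @nodeblock-decorated function in the
+# package, skipping the torch-installing modules listed in SKIPS:
+#     nbfs = import_nodeblock_functions("CPAC", exclude=SKIPS)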
+
+
+@dataclass
+class ResourceSourceList:
+ """A list of resource sources without duplicates."""
+
+ sources: list[str] = field(default_factory=list)
+
+ def __add__(self, other: "str | list[str] | ResourceSourceList") -> list[str]:
+ """Add a list of sources to the list."""
+ if isinstance(other, str):
+ if not other or other == "created_before_this_test":
+ # dummy node in a testing function, no need to include in inventory
+ return list(self)
+ other = [other]
+ new_set = {*self.sources, *other}
+ return sorted(new_set, key=str.casefold)
+
+ def __contains__(self, item: str) -> bool:
+ """Check if a source is in the list."""
+ return item in self.sources
+
+ def __delitem__(self, key: int) -> None:
+ """Delete a source by index."""
+ del self.sources[key]
+
+ def __eq__(self, value: Any) -> bool:
+ """Check if the lists of sources are the same."""
+ return set(self) == set(value)
+
+ def __getitem__(self, item: int) -> str:
+ """Get a source by index."""
+ return self.sources[item]
+
+ def __hash__(self) -> int:
+ """Get the hash of the list of sources."""
+ return hash(tuple(self.sources))  # tuple, because a list is not hashable
+
+ def __iadd__(
+ self, other: "str | list[str] | ResourceSourceList"
+ ) -> "ResourceSourceList":
+ """Add a list of sources to the list."""
+ self.sources = self + other
+ return self
+
+ def __iter__(self):
+ """Iterate over the sources."""
+ return iter(self.sources)
+
+ def __len__(self) -> int:
+ """Get the number of sources."""
+ return len(self.sources)
+
+ def __repr__(self) -> str:
+ """Get the reproducable string representation of the sources."""
+ return f"ResourceSourceList({(self.sources)})"
+
+ def __reversed__(self) -> list[str]:
+ """Get the sources reversed."""
+ return list(reversed(self.sources))
+
+ def __setitem__(self, key: int, value: str) -> None:
+ """Set a source by index."""
+ self.sources[key] = value
+
+ def __sorted__(self) -> list[str]:
+ """Get the sources sorted."""
+ return sorted(self.sources, key=str.casefold)
+
+ def __str__(self) -> str:
+ """Get the string representation of the sources."""
+ return str(self.sources)
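+
+
+# Behavior sketch (illustrative, with made-up source names): addition dedupes and
+# sorts case-insensitively, e.g.
+#     srcs = ResourceSourceList(["anat_ingress"])
+#     srcs += "func_ingress"
+#     srcs += "anat_ingress"  # duplicate, absorbed by the set union in __add__
+#     assert list(srcs) == ["anat_ingress", "func_ingress"]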
+
+
+@dataclass
+class ResourceIO:
+ """NodeBlockFunctions that use a resource for IO."""
+
+ name: str
+ """The name of the resource."""
+ output_from: ResourceSourceList | list[str] = field(
+ default_factory=ResourceSourceList
+ )
+ """The functions that output the resource."""
+ output_to: ResourceSourceList | list[str] = field(
+ default_factory=ResourceSourceList
+ )
+ """The subdirectory the resource is output to."""
+ input_for: ResourceSourceList | list[str] = field(
+ default_factory=ResourceSourceList
+ )
+ """The functions that use the resource as input."""
+
+ def __post_init__(self) -> None:
+ """Handle optionals."""
+ if isinstance(self.output_from, list):
+ self.output_from = ResourceSourceList(self.output_from)
+ if isinstance(self.output_to, list):
+ self.output_to = ResourceSourceList(self.output_to)
+ if isinstance(self.input_for, list):
+ self.input_for = ResourceSourceList(self.input_for)
+
+ def __str__(self) -> str:
+ """Return string representation for ResourceIO instance."""
+ return f"{{{self.name}: {{'input_for': {self.input_for!s}, 'output_from': {self.output_from!s}}}}})"
+
+ def as_dict(self) -> dict[str, list[str]]:
+ """Return the ResourceIO as a built-in dictionary type."""
+ return {
+ k: v
+ for k, v in {
+ "input_for": [str(source) for source in self.input_for],
+ "output_from": [str(source) for source in self.output_from],
+ "output_to": [str(source) for source in self.output_to],
+ }.items()
+ if v
+ }
+
+
+def cli_parser() -> Namespace:
+ """Parse command line argument."""
+ parser = ArgumentParser(
+ description="Inventory resources for C-PAC NodeBlockFunctions.",
+ formatter_class=ArgumentDefaultsHelpFormatter,
+ )
+ parser.add_argument(
+ "-o",
+ "--output",
+ nargs="?",
+ help="The output file to write the inventory to.",
+ type=Path,
+ default=Path("resource_inventory.yaml"),
+ )
+ return parser.parse_args()
+
+
+def _flatten_io(io: Iterable[Iterable]) -> list[str]:
+ """Given a list of strings or iterables thereof, flatten the list to all strings."""
+ if all(isinstance(resource, str) for resource in io):
+ return cast(list[str], io)
+ while not all(isinstance(resource, str) for resource in io):
+ io = list(
+ chain.from_iterable(
+ [
+ resource if not isinstance(resource, str) else [resource]
+ for resource in io
+ ]
+ )
+ )
+ return cast(list[str], io)
+
+
+class MultipleContext(list):
+ """Subclass of list to store multilpe contexts."""
+
+ def __init__(self, /, *args, **kwargs) -> None:
+ """Initialize MultipleContext."""
+ super().__init__(*args, **kwargs)
+ data = self._unique(self)
+ self.clear()
+ self.extend(data)
+
+ def __hash__(self) -> int:
+ """Hash a MultipleContext instance."""
+ return hash(str(self))
+
+ def __str__(self) -> str:
+ """Return a stringified MultipleContext instance."""
+ if len(self) == 1:
+ return str(self[0])
+ return super().__str__()
+
+ def append(self, item: Any) -> None:
+ """Append if not already included."""
+ if item not in self:
+ super().append(item)
+
+ def extend(self, iterable: Iterable) -> None:
+ """Extend MultipleContext."""
+ for item in iterable:
+ self.append(item)
+
+ @staticmethod
+ def _unique(iterable: Iterable) -> list:
+ """Dedupe."""
+ try:
+ seen = set()
+ return [x for x in iterable if not (x in seen or seen.add(x))]
+ except TypeError:
+ seen = set()
+ return [
+ x
+ for x in (MultipleContext(item) for item in iterable)
+ if not (x in seen or seen.add(x))
+ ]
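+
+
+# Behavior sketch (illustrative): MultipleContext dedupes while preserving order,
+# so MultipleContext(["a", "b", "a"]) == ["a", "b"], and a single-item instance
+# stringifies to that item: str(MultipleContext(["a"])) == "a".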
+
+
+class DirectlySetResources(ast.NodeVisitor):
+ """Class to track resources set directly, rather than through NodeBlocks."""
+
+ def __init__(self) -> None:
+ """Initialize the visitor."""
+ super().__init__()
+ self._context: dict[str, Any] = {}
+ self.dynamic_resources: dict[str, ResourceSourceList] = {
+ resource: ResourceSourceList(sources)
+ for resource, sources in ONE_OFFS.items()
+ }
+ self._history: dict[str, list[Any]] = {}
+ self.resources: dict[str, ResourceSourceList] = {}
+
+ def assign_resource(self, resource: str, value: str | MultipleContext) -> None:
+ """Assign a value to a resource."""
+ if isinstance(resource, ast.AST):
+ resource = self.parse_ast(resource)
+ resource = str(resource)
+ if isinstance(value, MultipleContext):
+ for subvalue in value:
+ self.assign_resource(resource, subvalue)
+ return
+ target = (
+ self.dynamic_resources
+ if r".*" in value or r".*" in resource
+ else self.resources
+ )
+ if resource not in target:
+ target[resource] = ResourceSourceList()
+ target[resource] += value
+
+ @property
+ def context(self) -> dict[str, Any]:
+ """Return the context."""
+ return self._context
+
+ @context.setter
+ def context(self, value: tuple[Iterable, Any]) -> None:
+ """Set the context."""
+ key, _value = value
+ if not isinstance(key, str):
+ for subkey in key:
+ self.context = subkey, _value
+ else:
+ self._context[key] = _value
+ if key not in self._history:
+ self._history[key] = [".*"]
+ self._history[key].append(_value)
+
+ def lookup_context(
+ self, variable: str, return_type: Optional[type] = None
+ ) -> str | MultipleContext:
+ """Plug in variable."""
+ if variable in self.context:
+ if self.context[variable] == variable or (
+ return_type and not isinstance(self.context[variable], return_type)
+ ):
+ history = list(self._history[variable])
+ while history and history[-1] == variable:
+ history.pop()
+ if history:
+ context = history[-1]
+ while (
+ return_type
+ and len(history)
+ and not isinstance(context, return_type)
+ ):
+ context = history.pop()
+ if return_type and not isinstance(context, return_type):
+ return ".*"
+ return context
+ return self.context[variable]
+ return ".*"
+
+ @staticmethod
+ def handle_multiple_contexts(contexts: list[str | list[str]]) -> list[str]:
+ """Parse multiple contexts."""
+ if isinstance(contexts, list):
+ return MultipleContext(
+ [
+ "".join(list(ctx))
+ for ctx in product(
+ *[
+ context if isinstance(context, list) else [context]
+ for context in contexts
+ ]
+ )
+ ]
+ )
+ return contexts
+
+ def parse_ast(self, node: Any) -> Any:
+ """Parse AST."""
+ if not isinstance(node, ast.AST):
+ if isinstance(node, str) or not isinstance(node, Iterable):
+ return str(node)
+ if isinstance(node, ast.Dict):
+ return {
+ self.parse_ast(key): self.parse_ast(value)
+ for key, value in dict(zip(node.keys, node.values)).items()
+ }
+ if isinstance(node, (MultipleContext, list, set, tuple)):
+ return type(node)(self.parse_ast(subnode) for subnode in node)
+ if isinstance(node, ast.FormattedValue):
+ if hasattr(node, "value") and hasattr(node.value, "id"):
+ return self.lookup_context(getattr(node.value, "id"))
+ if isinstance(node, ast.JoinedStr):
+ node_values = [self.parse_ast(value) for value in node.values]
+ if any(isinstance(value, MultipleContext) for value in node_values):
+ return self.handle_multiple_contexts(node_values)
+ return "".join(str(item) for item in node_values)
+ if isinstance(node, ast.Dict):
+ return {
+ self.parse_ast(key)
+ if isinstance(self.parse_ast(key), Hashable)
+ else ".*": self.parse_ast(value)
+ for key, value in dict(zip(node.keys, node.values)).items()
+ }
+ if not isinstance(node, ast.Call):
+ for attr in ["values", "elts", "args"]:
+ if hasattr(node, attr):
+ iterable = getattr(node, attr)
+ if isinstance(iterable, Iterable):
+ return [
+ self.parse_ast(subnode) for subnode in getattr(node, attr)
+ ]
+ return self.parse_ast(iterable)
+ for attr in ["value", "id", "arg"]:
+ if hasattr(node, attr):
+ return self.parse_ast(getattr(node, attr))
+ elif (
+ hasattr(node, "func")
+ and getattr(node.func, "attr", None) in ["items", "keys", "values"]
+ and getattr(getattr(node.func, "value", None), "id", None) in self.context
+ ):
+ context = self.lookup_context(node.func.value.id, return_type=dict)
+ if isinstance(context, dict):
+ return MultipleContext(getattr(context, node.func.attr)())
+ return r".*" # wildcard for regex matching
+
+ def visit_Assign(self, node: ast.Assign) -> None:
+ """Visit an assignment."""
+ value = self.parse_ast(node.value)
+ if value == "row" and getattr(node.value, "attr", None):
+ # hack for template dataframe
+ value = MultipleContext(getattr(template_dataframe(), node.value.attr))
+ for target in node.targets:
+ resource = self.parse_ast(target)
+ self.context = resource, value
+ self.generic_visit(node)
+
+ def visit_Call(self, node: ast.Call) -> None:
+ """Visit a function call."""
+ if isinstance(node.func, ast.Attribute) and node.func.attr == "set_data":
+ value = self.parse_ast(node.args[5])
+ if isinstance(node.args[5], ast.Name):
+ if isinstance(value, str):
+ value = self.lookup_context(value)
+ if hasattr(node.args[0], "value"):
+ resource: str = getattr(node.args[0], "value")
+ elif hasattr(node.args[0], "id"):
+ resource = self.lookup_context(getattr(node.args[0], "id"))
+ if isinstance(resource, MultipleContext):
+ if len(resource) == len(value):
+ for k, v in zip(resource, value):
+ self.assign_resource(k, v)
+ else:
+ for resource_context in resource:
+ self.assign_resource(resource_context, value)
+ self.generic_visit(node)
+ return
+ elif isinstance(node.args[0], ast.JoinedStr):
+ resource = self.parse_ast(node.args[0])
+ else:
+ self.generic_visit(node)
+ return
+ self.assign_resource(resource, value)
+ self.generic_visit(node)
+
+ def visit_For(self, node: ast.For) -> None:
+ """Vist for loop."""
+ target = self.parse_ast(node.target)
+ if (
+ hasattr(node.iter, "func")
+ and hasattr(node.iter.func, "value")
+ and hasattr(node.iter.func.value, "id")
+ ):
+ context = self.parse_ast(node.iter)
+ if not context:
+ context = r".*"
+ if isinstance(target, list):
+ target_len = len(target)
+ if isinstance(context, dict):
+ self.context = target[0], MultipleContext(context.keys())
+ if isinstance(context, list) and all(
+ (isinstance(item, tuple) and len(item) == target_len)
+ for item in context
+ ):
+ for index, item in enumerate(target):
+ self.context = (
+ item,
+ MultipleContext(
+ subcontext[index] for subcontext in context
+ ),
+ )
+ elif hasattr(node.iter, "value") and (
+ getattr(node.iter.value, "id", None) == "self"
+ or getattr(node.iter, "attr", False)
+ ):
+ self.context = target, ".*"
+ else:
+ self.context = target, self.parse_ast(node.iter)
+ self.generic_visit(node)
+
+ def visit_FunctionDef(self, node: ast.FunctionDef) -> None:
+ """Visit a function definition."""
+ if node.name == "set_data":
+ # skip the method definition
+ return
+ for arg in self.parse_ast(node):
+ self.context = arg, ".*"
+ self.generic_visit(node)
+
+
+def find_directly_set_resources(
+ package_name: str,
+) -> tuple[dict[str, ResourceSourceList], dict[str, ResourceSourceList]]:
+ """Find all resources set explicitly via :pyy:method:`~CPAC.pipeline.engine.ResourcePool.set_data`.
+
+ Parameters
+ ----------
+ package_name
+ The name of the package to search for resources.
+
+ Returns
+ -------
+ dict
+ A dictionary mapping each resource name to the names of the functions that set it.
+
+ dict
+ A dictionary containing regex strings for special cases.
+ """
+ resources: dict[str, ResourceSourceList] = {}
+ dynamic_resources: dict[str, ResourceSourceList] = {}
+ for dirpath, _, filenames in os.walk(str(files(package_name))):
+ for filename in filenames:
+ if filename.endswith(".py"):
+ filepath = os.path.join(dirpath, filename)
+ with open(filepath, "r", encoding="utf-8") as file:
+ tree = ast.parse(file.read(), filename=filepath)
+ directly_set = DirectlySetResources()
+ directly_set.visit(tree)
+ for resource in directly_set.resources:
+ if resource not in resources:
+ resources[resource] = ResourceSourceList()
+ resources[resource] += directly_set.resources[resource]
+ for resource in directly_set.dynamic_resources:
+ if resource not in dynamic_resources:
+ dynamic_resources[resource] = ResourceSourceList()
+ dynamic_resources[resource] += directly_set.dynamic_resources[
+ resource
+ ]
+ return resources, dynamic_resources
+
+
+def resource_inventory(package: str = "CPAC") -> dict[str, ResourceIO]:
+ """Gather all inputs and outputs for a list of NodeBlockFunctions."""
+ resources: dict[str, ResourceIO] = {}
+ # Node block function inputs and outputs
+ for nbf in import_nodeblock_functions(
+ package,
+ exclude=SKIPS,
+ ):
+ nbf_name = f"{nbf.name} ({nbf.__module__}.{nbf.__qualname__})"
+ if hasattr(nbf, "inputs"):
+ for nbf_input in _flatten_io(cast(list[Iterable], nbf.inputs)):
+ if nbf_input:
+ if nbf_input not in resources:
+ resources[nbf_input] = ResourceIO(
+ nbf_input, input_for=[nbf_name]
+ )
+ else:
+ resources[nbf_input].input_for += nbf_name
+ if hasattr(nbf, "outputs"):
+ for nbf_output in _flatten_io(cast(list[Iterable], nbf.outputs)):
+ if nbf_output:
+ if nbf_output not in resources:
+ resources[nbf_output] = ResourceIO(
+ nbf_output, output_from=[nbf_name]
+ )
+ else:
+ resources[nbf_output].output_from += nbf_name
+ # Template resources set from pipeline config
+ templates_from_config_df = template_dataframe()
+ for _, row in templates_from_config_df.iterrows():
+ output_from = f"pipeline configuration: {row.Pipeline_Config_Entry}"
+ if row.Key not in resources:
+ resources[row.Key] = ResourceIO(row.Key, output_from=[output_from])
+ else:
+ resources[row.Key].output_from += output_from
+ # Hard-coded resources
+ direct, dynamic = find_directly_set_resources(package)
+ for resource, functions in direct.items():
+ if resource not in resources:
+ resources[resource] = ResourceIO(resource, output_from=functions)
+ else:
+ resources[resource].output_from += functions
+ # Outputs
+ for _, row in Outputs.reference.iterrows():
+ if row.Resource not in resources:
+ resources[row.Resource] = ResourceIO(
+ row.Resource, output_to=[row["Sub-Directory"]]
+ )
+ else:
+ resources[row.Resource].output_to += row["Sub-Directory"]
+ # Special cases
+ for dynamic_key, dynamic_value in dynamic.items():
+ if dynamic_key != r".*":
+ dynamic_resource = re.compile(dynamic_key)
+ for resource in resources.keys():
+ if dynamic_resource.search(resource):
+ resources[resource].output_from += dynamic_value
+ if "interface" in resources:
+ # this is a loop in setting up nodeblocks
+ # https://github.com/FCP-INDI/C-PAC/blob/61ad414447023daf0e401a81c92267b09c64ed94/CPAC/pipeline/engine.py#L1453-L1464
+ # it's already handled in the NodeBlock resources
+ del resources["interface"]
+ return dict(sorted(resources.items(), key=lambda item: item[0].casefold()))
+
+
+def dump_inventory_to_yaml(inventory: dict[str, ResourceIO]) -> str:
+ """Dump NodeBlock Interfaces to a YAML string."""
+ return yaml.dump(
+ {key: value.as_dict() for key, value in inventory.items()}, sort_keys=False
+ )
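+
+
+# Output shape (illustrative; resource and function names below are made up): each
+# resource maps to whichever of its IO lists are non-empty, e.g.
+#
+#     some-resource:
+#       input_for:
+#       - some_nodeblock (CPAC.some_module.some_nodeblock)
+#       output_from:
+#       - some_other_nodeblock (CPAC.some_module.some_other_nodeblock)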
+
+
+def where_to_find(resources: list[str] | str) -> str:
+ """Return a multiline string describing where each listed resource is output from."""
+ if isinstance(resources, str):
+ resources = [resources]
+ resources = _flatten_io(resources)
+ inventory = resource_inventory("CPAC")
+ output = ""
+ for resource in resources:
+ output += f"'{resource}' can be output from:\n"
+ if resource in inventory:
+ for source in inventory[resource].output_from:
+ output += f" {source}\n"
+ else:
+ output += " !! Nowhere !!\n"
+ output += "\n"
+ return output.rstrip()
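+
+
+# Message shape (illustrative; names are made up): for a resource present in the
+# inventory the text reads
+#
+#     'some-resource' can be output from:
+#       some_nodeblock (CPAC.some_module.some_nodeblock)
+#
+# and a resource absent from the inventory is reported as "!! Nowhere !!".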
+
+
+def main() -> None:
+ """Save the NodeBlock inventory to a file."""
+ args = cli_parser()
+ with args.output.open("w") as file:
+ file.write(dump_inventory_to_yaml(resource_inventory("CPAC")))
+
+
+if __name__ == "__main__":
+ main()
diff --git a/CPAC/pipeline/schema.py b/CPAC/pipeline/schema.py
index 915cb47045..828c0b1aec 100644
--- a/CPAC/pipeline/schema.py
+++ b/CPAC/pipeline/schema.py
@@ -423,6 +423,9 @@ def sanitize(filename):
"skip env check": Maybe(bool), # flag for skipping an environment check
"pipeline_setup": {
"pipeline_name": All(str, Length(min=1), sanitize),
+ "desired_orientation": In(
+ {"RPI", "LPI", "RAI", "LAI", "RAS", "LAS", "RPS", "LPS"}
+ ),
"output_directory": {
"path": str,
"source_outputs_dir": Maybe(str),
@@ -507,6 +510,7 @@ def sanitize(filename):
"Debugging": {
"verbose": bool1_1,
},
+ "freesurfer_dir": str,
"outdir_ingress": {
"run": bool1_1,
"Template": Maybe(str),
@@ -635,6 +639,9 @@ def sanitize(filename):
},
"FreeSurfer-BET": {"T1w_brain_template_mask_ccs": Maybe(str)},
},
+ "restore_t1w_intensity": {
+ "run": bool1_1,
+ },
},
"segmentation": {
"run": bool1_1,
@@ -709,8 +716,6 @@ def sanitize(filename):
"interpolation": In({"trilinear", "sinc", "spline"}),
"identity_matrix": Maybe(str),
"ref_mask": Maybe(str),
- "ref_mask_res-2": Maybe(str),
- "T1w_template_res-2": Maybe(str),
},
},
"overwrite_transform": {
@@ -721,15 +726,12 @@ def sanitize(filename):
"functional_registration": {
"coregistration": {
"run": bool1_1,
- "reference": In({"brain", "restore-brain"}),
"interpolation": In({"trilinear", "sinc", "spline"}),
"using": str,
- "input": str,
"cost": str,
"dof": int,
"arguments": Maybe(str),
"func_input_prep": {
- "reg_with_skull": bool1_1,
"input": [
In(
{
@@ -740,7 +742,10 @@ def sanitize(filename):
)
],
"Mean Functional": {"n4_correct_func": bool1_1},
- "Selected Functional Volume": {"func_reg_input_volume": int},
+ "Selected Functional Volume": {
+ "func_reg_input_volume": int,
+ },
+ "mask_sbref": bool1_1,
},
"boundary_based_registration": {
"run": forkable,
@@ -813,6 +818,8 @@ def sanitize(filename):
"surface_analysis": {
"abcd_prefreesurfer_prep": {
"run": bool1_1,
+ "ref_mask_res-2": Maybe(str),
+ "T1w_template_res-2": Maybe(str),
},
"freesurfer": {
"run_reconall": bool1_1,
@@ -1033,7 +1040,14 @@ def sanitize(filename):
{
"Name": Required(str),
"Censor": {
- "method": str,
+ "method": In(
+ [
+ "Kill",
+ "Zero",
+ "Interpolate",
+ "SpikeRegression",
+ ]
+ ),
"thresholds": [
{
"type": str,
@@ -1385,6 +1399,22 @@ def schema(config_dict):
" Try turning one option off.\n "
)
raise ExclusiveInvalid(msg)
+
+ overwrite = partially_validated["registration_workflows"][
+ "anatomical_registration"
+ ]["overwrite_transform"]
+
+ if (
+ overwrite["run"]
+ and "ANTS"
+ not in partially_validated["registration_workflows"][
+ "anatomical_registration"
+ ]["registration"]["using"]
+ ):
+ raise ExclusiveInvalid(
+ "[!] Overwrite transform method is the same as the anatomical registration method! "
+ "No need to overwrite transform with the same registration method. Please turn it off or use a different registration method."
+ )
except KeyError:
pass
try:
diff --git a/CPAC/pipeline/test/test_cpac_group_runner.py b/CPAC/pipeline/test/test_cpac_group_runner.py
index d8a218ca19..6c20341ede 100644
--- a/CPAC/pipeline/test/test_cpac_group_runner.py
+++ b/CPAC/pipeline/test/test_cpac_group_runner.py
@@ -14,12 +14,11 @@
# You should have received a copy of the GNU Lesser General Public
# License along with C-PAC. If not, see <https://www.gnu.org/licenses/>.
-from logging import basicConfig, INFO
+
from CPAC.utils.monitoring.custom_logging import getLogger
logger = getLogger("CPAC.pipeline.test")
-basicConfig(format="%(message)s", level=INFO)
def run_gather_outputs_func(pipeline_out_dir):
diff --git a/CPAC/pipeline/test/test_cpac_runner.py b/CPAC/pipeline/test/test_cpac_runner.py
index 7ee91f5125..1e43a3e3b6 100644
--- a/CPAC/pipeline/test/test_cpac_runner.py
+++ b/CPAC/pipeline/test/test_cpac_runner.py
@@ -1,13 +1,23 @@
import os
+from pathlib import Path
import pkg_resources as p
import pytest
from CPAC.pipeline.cpac_pipeline import load_cpac_pipe_config
from CPAC.pipeline.cpac_runner import run_T1w_longitudinal
+from CPAC.pipeline.utils import get_shell
from CPAC.utils.bids_utils import create_cpac_data_config
+def test_shell() -> None:
+ """Test that ``get_shell`` returns a path to an executable BASH."""
+ shell: str = get_shell()
+ assert shell.lower().endswith("bash"), "Default shell isn't BASH?"
+ assert Path(shell).exists(), "No default shell found."
+ assert os.access(shell, os.X_OK), "Default shell not executable."
+
+
@pytest.mark.skip(reason="not a pytest test")
def test_run_T1w_longitudinal(bids_dir, cfg, test_dir, part_id):
sub_data_list = create_cpac_data_config(
diff --git a/CPAC/pipeline/test/test_engine.py b/CPAC/pipeline/test/test_engine.py
index c228fc3640..25b16d9e44 100644
--- a/CPAC/pipeline/test/test_engine.py
+++ b/CPAC/pipeline/test/test_engine.py
@@ -1,5 +1,27 @@
+# Copyright (C) 2021-2025 C-PAC Developers
+
+# This file is part of C-PAC.
+
+# C-PAC is free software: you can redistribute it and/or modify it under
+# the terms of the GNU Lesser General Public License as published by the
+# Free Software Foundation, either version 3 of the License, or (at your
+# option) any later version.
+
+# C-PAC is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
+# License for more details.
+
+# You should have received a copy of the GNU Lesser General Public
+# License along with C-PAC. If not, see <https://www.gnu.org/licenses/>.
+"""Unit tests for the C-PAC pipeline engine."""
+
+from argparse import Namespace
import os
+from pathlib import Path
+from typing import cast
+from _pytest.logging import LogCaptureFixture
import pytest
from CPAC.pipeline.cpac_pipeline import (
@@ -90,7 +112,7 @@ def test_ingress_pipeconfig_data(pipe_config, bids_dir, test_dir):
rpool = ResourcePool(name=unique_id, cfg=cfg)
- rpool = ingress_pipeconfig_paths(cfg, rpool, sub_data_dct, unique_id)
+ wf, rpool = ingress_pipeconfig_paths(wf, cfg, rpool, sub_data_dct, unique_id)
rpool.gather_pipes(wf, cfg, all=True)
@@ -138,17 +160,129 @@ def test_build_workflow(pipe_config, bids_dir, test_dir):
wf.run()
+def test_missing_resource(
+ bids_examples: Path, caplog: LogCaptureFixture, tmp_path: Path
+) -> None:
+ """Test the error message thrown when a resource is missing."""
+ from datetime import datetime
+
+ import yaml
+
+ from CPAC.pipeline.cpac_runner import run
+ from CPAC.utils.bids_utils import sub_list_filter_by_labels
+ from CPAC.utils.configuration import Preconfiguration, set_subject
+ from CPAC.utils.configuration.yaml_template import create_yaml_from_template
+
+ st = datetime.now().strftime("%Y-%m-%dT%H-%M-%SZ")
+ namespace = Namespace(
+ bids_dir=str(bids_examples / "ds113b"),
+ output_dir=str(tmp_path / "output"),
+ analysis_level="test_config",
+ participant_label="sub-01",
+ )
+ c = Preconfiguration("anat-only")
+ c["pipeline_setup", "output_directory", "path"] = namespace.output_dir
+ c["pipeline_setup", "log_directory", "path"] = str(tmp_path / "logs")
+ c["pipeline_setup", "working_directory", "path"] = str(tmp_path / "work")
+ c["pipeline_setup", "system_config", "maximum_memory_per_participant"] = 1.0
+ c["pipeline_setup", "system_config", "max_cores_per_participant"] = 1
+ c["pipeline_setup", "system_config", "num_participants_at_once"] = 1
+ c["pipeline_setup", "system_config", "num_ants_threads"] = 1
+ c["pipeline_setup", "working_directory", "remove_working_dir"] = True
+ sub_list = create_cpac_data_config(
+ namespace.bids_dir,
+ namespace.participant_label,
+ None,
+ True,
+ only_one_anat=False,
+ )
+ sub_list = sub_list_filter_by_labels(list(sub_list), {"T1w": None, "bold": None})
+ for i, sub in enumerate(sub_list):
+ if isinstance(sub.get("anat"), dict):
+ for anat_key in sub["anat"]:
+ if isinstance(sub["anat"][anat_key], list) and len(
+ sub["anat"][anat_key]
+ ):
+ sub_list[i]["anat"][anat_key] = sub["anat"][anat_key][0]
+ if isinstance(sub.get("anat"), list) and len(sub["anat"]):
+ sub_list[i]["anat"] = sub["anat"][0]
+ data_config_file = f"cpac_data_config_{st}.yml"
+ sublogdirs = [set_subject(sub, c)[2] for sub in sub_list]
+ # write out the data configuration file
+ data_config_file = os.path.join(sublogdirs[0], data_config_file)
+ with open(data_config_file, "w", encoding="utf-8") as _f:
+ noalias_dumper = yaml.dumper.SafeDumper
+ noalias_dumper.ignore_aliases = lambda self, data: True
+ yaml.dump(sub_list, _f, default_flow_style=False, Dumper=noalias_dumper)
+
+ # update and write out pipeline config file
+ pipeline_config_file = os.path.join(sublogdirs[0], f"cpac_pipeline_config_{st}.yml")
+ with open(pipeline_config_file, "w", encoding="utf-8") as _f:
+ _f.write(create_yaml_from_template(c))
+ minimized_config = f"{pipeline_config_file[:-4]}_min.yml"
+ with open(minimized_config, "w", encoding="utf-8") as _f:
+ _f.write(create_yaml_from_template(c, import_from="blank"))
+ for config_file in (data_config_file, pipeline_config_file, minimized_config):
+ os.chmod(config_file, 0o444) # Make config files readonly
+
+ if len(sublogdirs) > 1:
+ # If more than one run is included in the given data config
+ # file, an identical copy of the data and pipeline config
+ # will be included in the log directory for each run
+ for sublogdir in sublogdirs[1:]:
+ for config_file in (
+ data_config_file,
+ pipeline_config_file,
+ minimized_config,
+ ):
+ try:
+ os.link(config_file, config_file.replace(sublogdirs[0], sublogdir))
+ except FileExistsError:
+ pass
+
+ run(
+ data_config_file,
+ pipeline_config_file,
+ plugin="Linear",
+ plugin_args={
+ "n_procs": int(
+ cast(
+ int | str,
+ c["pipeline_setup", "system_config", "max_cores_per_participant"],
+ )
+ ),
+ "memory_gb": int(
+ cast(
+ int | str,
+ c[
+ "pipeline_setup",
+ "system_config",
+ "maximum_memory_per_participant",
+ ],
+ )
+ ),
+ "raise_insufficient": c[
+ "pipeline_setup", "system_config", "raise_insufficient"
+ ],
+ },
+ tracking=False,
+ test_config=namespace.analysis_level == "test_config",
+ )
+
+ assert "can be output from" in caplog.text
+
+
# bids_dir = "/Users/steven.giavasis/data/HBN-SI_dataset/rawdata"
# test_dir = "/test_dir"
# cfg = "/Users/hecheng.jin/GitHub/DevBranch/CPAC/resources/configs/pipeline_config_monkey-ABCD.yml"
-cfg = "/Users/hecheng.jin/GitHub/pipeline_config_monkey-ABCDlocal.yml"
-bids_dir = "/Users/hecheng.jin/Monkey/monkey_data_oxford/site-ucdavis"
-test_dir = "/Users/hecheng.jin/GitHub/Test/T2preproc"
# test_ingress_func_raw_data(cfg, bids_dir, test_dir)
# test_ingress_anat_raw_data(cfg, bids_dir, test_dir)
# test_ingress_pipeconfig_data(cfg, bids_dir, test_dir)
# test_build_anat_preproc_stack(cfg, bids_dir, test_dir)
if __name__ == "__main__":
+ cfg = "/Users/hecheng.jin/GitHub/pipeline_config_monkey-ABCDlocal.yml"
+ bids_dir = "/Users/hecheng.jin/Monkey/monkey_data_oxford/site-ucdavis"
+ test_dir = "/Users/hecheng.jin/GitHub/Test/T2preproc"
test_build_workflow(cfg, bids_dir, test_dir)
diff --git a/CPAC/pipeline/test/test_schema_validation.py b/CPAC/pipeline/test/test_schema_validation.py
index 36a75a1a00..0b5e20da3f 100644
--- a/CPAC/pipeline/test/test_schema_validation.py
+++ b/CPAC/pipeline/test/test_schema_validation.py
@@ -113,3 +113,33 @@ def test_pipeline_name():
"""Test that pipeline_name sucessfully sanitizes."""
c = Configuration({"pipeline_setup": {"pipeline_name": ":va:lid name"}})
assert c["pipeline_setup", "pipeline_name"] == "valid_name"
+
+
+@pytest.mark.parametrize(
+ "registration_using",
+ [
+ list(combo)
+ for _ in [
+ list(combinations(["ANTS", "FSL", "FSL-linear"], i)) for i in range(1, 4)
+ ]
+ for combo in _
+ ],
+)
+def test_overwrite_transform(registration_using):
+ """Test that if overwrite transform method is already a registration method."""
+ # pylint: disable=invalid-name
+
+ d = {
+ "registration_workflows": {
+ "anatomical_registration": {
+ "registration": {"using": registration_using},
+ "overwrite_transform": {"run": "On", "using": "FSL"},
+ }
+ }
+ }
+ if "ANTS" in registration_using:
+ Configuration(d) # validates without exception
+ else:
+ with pytest.raises(ExclusiveInvalid) as e:
+ Configuration(d)
+ assert "Overwrite transform method is the same" in str(e.value)
diff --git a/CPAC/pipeline/utils.py b/CPAC/pipeline/utils.py
index 39acb6429f..d135addc41 100644
--- a/CPAC/pipeline/utils.py
+++ b/CPAC/pipeline/utils.py
@@ -17,6 +17,9 @@
"""C-PAC pipeline engine utilities."""
from itertools import chain
+import os
+import subprocess
+from typing import Optional
from CPAC.func_preproc.func_motion import motion_estimate_filter
from CPAC.utils.bids_utils import insert_entity
@@ -24,6 +27,20 @@
MOVEMENT_FILTER_KEYS = motion_estimate_filter.outputs
+def get_shell() -> str:
+ """Return the path to default shell."""
+ shell: Optional[str] = subprocess.getoutput(
+ f"which $(ps -p {os.getppid()} -o comm=)"
+ )
+ if not shell:
+ try:
+ shell = os.environ["_SHELL"]
+ except KeyError:
+ msg = "Shell command not found."
+ raise EnvironmentError(msg)
+ return shell
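+
+
+# Usage sketch (illustrative): on a typical Linux host the pipeline's parent
+# process is a shell, so get_shell() resolves to something like "/bin/bash";
+# run_cpac_on_cluster() substitutes that path into the batch-script template as
+# the "shell" entry.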
+
+
def name_fork(resource_idx, cfg, json_info, out_dct):
"""Create and insert entities for forkpoints.
diff --git a/CPAC/registration/registration.py b/CPAC/registration/registration.py
index da63e694e4..5b5d7493a9 100644
--- a/CPAC/registration/registration.py
+++ b/CPAC/registration/registration.py
@@ -24,6 +24,7 @@
from nipype.interfaces.afni import utils as afni_utils
from CPAC.anat_preproc.lesion_preproc import create_lesion_preproc
+from CPAC.func_preproc.func_preproc import fsl_afni_subworkflow
from CPAC.func_preproc.utils import chunk_ts, split_ts_chunks
from CPAC.pipeline import nipype_pipeline_engine as pe
from CPAC.pipeline.nodeblock import nodeblock
@@ -1552,21 +1553,9 @@ def FSL_registration_connector(
}
if opt == "FSL":
- if (
- cfg.registration_workflows["anatomical_registration"]["registration"][
- "FSL-FNIRT"
- ]["ref_resolution"]
- == cfg.registration_workflows["anatomical_registration"][
- "resolution_for_anat"
- ]
- ):
- fnirt_reg_anat_mni = create_fsl_fnirt_nonlinear_reg(
- f"anat_mni_fnirt_register{symm}"
- )
- else:
- fnirt_reg_anat_mni = create_fsl_fnirt_nonlinear_reg_nhp(
- f"anat_mni_fnirt_register{symm}"
- )
+ fnirt_reg_anat_mni = create_fsl_fnirt_nonlinear_reg_nhp(
+ f"anat_mni_fnirt_register{symm}"
+ )
wf.connect(
inputNode, "input_brain", fnirt_reg_anat_mni, "inputspec.input_brain"
@@ -1602,55 +1591,34 @@ def FSL_registration_connector(
inputNode, "fnirt_config", fnirt_reg_anat_mni, "inputspec.fnirt_config"
)
- if (
- cfg.registration_workflows["anatomical_registration"]["registration"][
- "FSL-FNIRT"
- ]["ref_resolution"]
- == cfg.registration_workflows["anatomical_registration"][
- "resolution_for_anat"
- ]
- ):
- # NOTE: this is an UPDATE because of the opt block above
- added_outputs = {
- f"space-{sym}template_desc-preproc_{orig}": (
- fnirt_reg_anat_mni,
- "outputspec.output_brain",
- ),
- f"from-{orig}_to-{sym}{tmpl}template_mode-image_xfm": (
- fnirt_reg_anat_mni,
- "outputspec.nonlinear_xfm",
- ),
- }
- outputs.update(added_outputs)
- else:
- # NOTE: this is an UPDATE because of the opt block above
- added_outputs = {
- f"space-{sym}template_desc-preproc_{orig}": (
- fnirt_reg_anat_mni,
- "outputspec.output_brain",
- ),
- f"space-{sym}template_desc-head_{orig}": (
- fnirt_reg_anat_mni,
- "outputspec.output_head",
- ),
- f"space-{sym}template_desc-{orig}_mask": (
- fnirt_reg_anat_mni,
- "outputspec.output_mask",
- ),
- f"space-{sym}template_desc-T1wT2w_biasfield": (
- fnirt_reg_anat_mni,
- "outputspec.output_biasfield",
- ),
- f"from-{orig}_to-{sym}{tmpl}template_mode-image_xfm": (
- fnirt_reg_anat_mni,
- "outputspec.nonlinear_xfm",
- ),
- f"from-{orig}_to-{sym}{tmpl}template_mode-image_warp": (
- fnirt_reg_anat_mni,
- "outputspec.nonlinear_warp",
- ),
- }
- outputs.update(added_outputs)
+ # NOTE: this is an UPDATE because of the opt block above
+ added_outputs = {
+ f"space-{sym}template_desc-preproc_{orig}": (
+ fnirt_reg_anat_mni,
+ "outputspec.output_brain",
+ ),
+ f"space-{sym}template_desc-head_{orig}": (
+ fnirt_reg_anat_mni,
+ "outputspec.output_head",
+ ),
+ f"space-{sym}template_desc-{'brain' if orig == 'T1w' else orig}_mask": (
+ fnirt_reg_anat_mni,
+ "outputspec.output_mask",
+ ),
+ f"space-{sym}template_desc-T1wT2w_biasfield": (
+ fnirt_reg_anat_mni,
+ "outputspec.output_biasfield",
+ ),
+ f"from-{orig}_to-{sym}{tmpl}template_mode-image_xfm": (
+ fnirt_reg_anat_mni,
+ "outputspec.nonlinear_xfm",
+ ),
+ f"from-{orig}_to-{sym}{tmpl}template_mode-image_warp": (
+ fnirt_reg_anat_mni,
+ "outputspec.nonlinear_warp",
+ ),
+ }
+ outputs.update(added_outputs)
return (wf, outputs)
@@ -1736,7 +1704,7 @@ def ANTs_registration_connector(
"ANTs"
]["use_lesion_mask"]:
# Create lesion preproc node to apply afni Refit and Resample
- lesion_preproc = create_lesion_preproc(wf_name=f"lesion_preproc{symm}")
+ lesion_preproc = create_lesion_preproc(cfg, wf_name=f"lesion_preproc{symm}")
wf.connect(inputNode, "lesion_mask", lesion_preproc, "inputspec.lesion")
wf.connect(
lesion_preproc,
@@ -2271,7 +2239,7 @@ def bold_to_T1template_xfm_connector(
outputs={
"space-template_desc-preproc_T1w": {"Template": "T1w-brain-template"},
"space-template_desc-head_T1w": {"Template": "T1w-template"},
- "space-template_desc-T1w_mask": {"Template": "T1w-template"},
+ "space-template_desc-brain_mask": {"Template": "T1w-template"},
"space-template_desc-T1wT2w_biasfield": {"Template": "T1w-template"},
"from-T1w_to-template_mode-image_desc-linear_xfm": {"Template": "T1w-template"},
"from-template_to-T1w_mode-image_desc-linear_xfm": {"Template": "T1w-template"},
@@ -2306,23 +2274,11 @@ def register_FSL_anat_to_template(wf, cfg, strat_pool, pipe_num, opt=None):
node, out = connect
wf.connect(node, out, fsl, "inputspec.input_brain")
- if (
- cfg.registration_workflows["anatomical_registration"]["registration"][
- "FSL-FNIRT"
- ]["ref_resolution"]
- == cfg.registration_workflows["anatomical_registration"]["resolution_for_anat"]
- ):
- node, out = strat_pool.get_data("T1w-brain-template")
- wf.connect(node, out, fsl, "inputspec.reference_brain")
-
- node, out = strat_pool.get_data("T1w-template")
- wf.connect(node, out, fsl, "inputspec.reference_head")
- else:
- node, out = strat_pool.get_data("FNIRT-T1w-brain-template")
- wf.connect(node, out, fsl, "inputspec.reference_brain")
+ node, out = strat_pool.get_data("T1w-brain-template")
+ wf.connect(node, out, fsl, "inputspec.reference_brain")
- node, out = strat_pool.get_data("FNIRT-T1w-template")
- wf.connect(node, out, fsl, "inputspec.reference_head")
+ node, out = strat_pool.get_data("T1w-template")
+ wf.connect(node, out, fsl, "inputspec.reference_head")
node, out = strat_pool.get_data(
["desc-preproc_T1w", "space-longitudinal_desc-reorient_T1w"]
@@ -2362,26 +2318,28 @@ def register_FSL_anat_to_template(wf, cfg, strat_pool, pipe_num, opt=None):
"dilated-symmetric-brain-mask",
],
outputs={
- "space-symtemplate_desc-preproc_T1w": {
- "Template": "T1w-brain-template-symmetric"
- },
- "from-T1w_to-symtemplate_mode-image_desc-linear_xfm": {
- "Template": "T1w-template-symmetric"
- },
- "from-symtemplate_to-T1w_mode-image_desc-linear_xfm": {
- "Template": "T1w-template-symmetric"
- },
- "from-T1w_to-symtemplate_mode-image_xfm": {
- "Template": "T1w-template-symmetric"
- },
- "from-longitudinal_to-symtemplate_mode-image_desc-linear_xfm": {
- "Template": "T1w-template-symmetric"
- },
- "from-symtemplate_to-longitudinal_mode-image_desc-linear_xfm": {
- "Template": "T1w-template-symmetric"
+ **{
+ f"space-symtemplate_desc-{suffix}": {
+ "Template": "T1w-brain-template-symmetric"
+ }
+ for suffix in [
+ *[f"{desc}_T1w" for desc in ["brain", "preproc"]],
+ "brain_mask",
+ ]
},
- "from-longitudinal_to-symtemplate_mode-image_xfm": {
- "Template": "T1w-template-symmetric"
+ **{
+ output: {"Template": "T1w-template-symmetric"}
+ for output in [
+ "space-symtemplate_desc-head_T1w",
+ "from-T1w_to-symtemplate_mode-image_desc-linear_xfm",
+ "from-symtemplate_to-T1w_mode-image_desc-linear_xfm",
+ "from-T1w_to-symtemplate_mode-image_warp",
+ "from-T1w_to-symtemplate_mode-image_xfm",
+ "from-longitudinal_to-symtemplate_mode-image_desc-linear_xfm",
+ "from-symtemplate_to-longitudinal_mode-image_desc-linear_xfm",
+ "from-longitudinal_to-symtemplate_mode-image_xfm",
+ "space-symtemplate_desc-T1wT2w_biasfield",
+ ]
},
},
)
@@ -2896,7 +2854,6 @@ def register_ANTs_EPI_to_template(wf, cfg, strat_pool, pipe_num, opt=None):
outputs={
"space-template_desc-preproc_T1w": {"Template": "T1w-template"},
"space-template_desc-head_T1w": {"Template": "T1w-template"},
- "space-template_desc-T1w_mask": {"Template": "T1w-template"},
"from-T1w_to-template_mode-image_xfm": {"Template": "T1w-template"},
"from-template_to-T1w_mode-image_xfm": {"Template": "T1w-template"},
},
@@ -3116,14 +3073,46 @@ def overwrite_transform_anat_to_template(wf, cfg, strat_pool, pipe_num, opt=None
outputs = {
"space-template_desc-preproc_T1w": (apply_mask, "out_file"),
"space-template_desc-head_T1w": (fsl_apply_warp_t1_to_template, "out_file"),
- "space-template_desc-T1w_mask": (
- fsl_apply_warp_t1_brain_mask_to_template,
- "out_file",
- ),
"from-T1w_to-template_mode-image_xfm": (merge_xfms, "merged_file"),
"from-template_to-T1w_mode-image_xfm": (merge_inv_xfms, "merged_file"),
}
+ else:
+ outputs = {}
+
+ return (wf, outputs)
+
+
+@nodeblock(
+ name="mask_sbref",
+ switch=[
+ ["registration_workflows", "functional_registration", "coregistration", "run"],
+ [
+ "registration_workflows",
+ "functional_registration",
+ "coregistration",
+ "func_input_prep",
+ "mask_sbref",
+ ],
+ ],
+ inputs=["sbref", "space-bold_desc-brain_mask"],
+ outputs=["sbref"],
+)
+def mask_sbref(wf, cfg, strat_pool, pipe_num, opt=None):
+ """Mask sbref with brain mask."""
+ mask_sbref = pe.Node(interface=afni.Calc(), name=f"mask_sbref_{pipe_num}")
+
+ mask_sbref.inputs.expr = "a*b"
+ mask_sbref.inputs.outputtype = "NIFTI_GZ"
+
+ node, out = strat_pool.get_data("sbref")
+ wf.connect(node, out, mask_sbref, "in_file_a")
+
+ node, out = strat_pool.get_data("space-bold_desc-brain_mask")
+ wf.connect(node, out, mask_sbref, "in_file_b")
+
+ outputs = {"sbref": (mask_sbref, "out_file")}
+
return (wf, outputs)
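Note: the `mask_sbref` nodeblock above is just a voxelwise multiplication of the single-band reference by the BOLD brain mask via AFNI 3dcalc. A minimal standalone sketch of the same operation, with hypothetical file names (only `afni.Calc` and its `expr`/`in_file_a`/`in_file_b` inputs come from the nodeblock itself):

from nipype.interfaces import afni

# Multiply the sbref (a) by the binary brain mask (b), voxel by voxel.
mask = afni.Calc()
mask.inputs.in_file_a = "sbref.nii.gz"        # hypothetical sbref
mask.inputs.in_file_b = "brain_mask.nii.gz"   # hypothetical space-bold brain mask
mask.inputs.expr = "a*b"
mask.inputs.outputtype = "NIFTI_GZ"
result = mask.run()  # result.outputs.out_file is the masked sbref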
@@ -3138,7 +3127,7 @@ def overwrite_transform_anat_to_template(wf, cfg, strat_pool, pipe_num, opt=None
"input",
],
option_val="Selected_Functional_Volume",
- inputs=[("desc-brain_bold", ["desc-motion_bold", "bold"], "sbref")],
+ inputs=[("desc-preproc_bold", "sbref")],
outputs=["sbref"],
)
def coregistration_prep_vol(wf, cfg, strat_pool, pipe_num, opt=None):
@@ -3153,15 +3142,7 @@ def coregistration_prep_vol(wf, cfg, strat_pool, pipe_num, opt=None):
outputtype="NIFTI_GZ",
)
- if not cfg.registration_workflows["functional_registration"]["coregistration"][
- "func_input_prep"
- ]["reg_with_skull"]:
- node, out = strat_pool.get_data("desc-brain_bold")
- else:
- # TODO check which file is functional_skull_leaf
- # TODO add a function to choose brain or skull?
- node, out = strat_pool.get_data(["desc-motion_bold", "bold"])
-
+ node, out = strat_pool.get_data("desc-preproc_bold")
wf.connect(node, out, get_func_volume, "in_file_a")
coreg_input = (get_func_volume, "out_file")
@@ -3223,14 +3204,34 @@ def coregistration_prep_mean(wf, cfg, strat_pool, pipe_num, opt=None):
"input",
],
option_val="fmriprep_reference",
- inputs=["desc-ref_bold"],
+ inputs=[
+ ("motion-basefile", "desc-preproc_bold"),
+ "FSL-AFNI-bold-ref",
+ "FSL-AFNI-brain-mask",
+ "FSL-AFNI-brain-probseg",
+ "desc-unifized_bold",
+ ],
outputs=["sbref"],
)
def coregistration_prep_fmriprep(wf, cfg, strat_pool, pipe_num, opt=None):
"""Generate fMRIPrep-style single-band reference for coregistration."""
- coreg_input = strat_pool.get_data("desc-ref_bold")
+ outputs = {}
- outputs = {"sbref": coreg_input}
+ if not strat_pool.check_rpool("desc-unifized_bold"):
+ fsl_afni_wf = fsl_afni_subworkflow(cfg, pipe_num, opt)
+
+ for key in [
+ "FSL-AFNI-bold-ref",
+ "FSL-AFNI-brain-mask",
+ "FSL-AFNI-brain-probseg",
+ "motion-basefile",
+ ]:
+ node, out = strat_pool.get_data(key)
+ wf.connect(node, out, fsl_afni_wf, f"inputspec.{key}")
+
+ outputs["sbref"] = (fsl_afni_wf, "outputspec.desc-unifized_bold")
+ else:
+ outputs["sbref"] = strat_pool.get_data("desc-unifized_bold")
return (wf, outputs)
@@ -3251,7 +3252,7 @@ def coregistration_prep_fmriprep(wf, cfg, strat_pool, pipe_num, opt=None):
),
(
"desc-preproc_T1w",
- "desc-restore-brain_T1w",
+ ["desc-restore-brain_T1w", "desc-preproc_T1w"],
"desc-preproc_T2w",
"desc-preproc_T2w",
"T2w",
@@ -3322,21 +3323,7 @@ def coregistration(wf, cfg, strat_pool, pipe_num, opt=None):
node, out = strat_pool.get_data("sbref")
wf.connect(node, out, func_to_anat, "inputspec.func")
- if (
- cfg.registration_workflows["functional_registration"]["coregistration"][
- "reference"
- ]
- == "brain"
- ):
- # TODO: use JSON meta-data to confirm
- node, out = strat_pool.get_data("desc-preproc_T1w")
- elif (
- cfg.registration_workflows["functional_registration"]["coregistration"][
- "reference"
- ]
- == "restore-brain"
- ):
- node, out = strat_pool.get_data("desc-restore-brain_T1w")
+ node, out = strat_pool.get_data(["desc-restore-brain_T1w", "desc-preproc_T1w"])
wf.connect(node, out, func_to_anat, "inputspec.anat")
if diff_complete:
@@ -3638,7 +3625,7 @@ def create_func_to_T1template_symmetric_xfm(wf, cfg, strat_pool, pipe_num, opt=N
"sbref",
"desc-preproc_bold",
"desc-stc_bold",
- "bold",
+ "desc-reorient_bold",
"from-bold_to-T1w_mode-image_desc-linear_xfm",
),
"despiked-fieldmap",
@@ -3726,7 +3713,7 @@ def apply_phasediff_to_timeseries_separately(wf, cfg, strat_pool, pipe_num, opt=
node, out = strat_pool.get_data("desc-stc_bold")
out_label = "desc-stc_bold"
elif opt == "abcd":
- node, out = strat_pool.get_data("bold")
+ node, out = strat_pool.get_data("desc-reorient_bold")
out_label = "bold"
wf.connect(node, out, warp_bold, "in_file")
@@ -3777,13 +3764,13 @@ def apply_phasediff_to_timeseries_separately(wf, cfg, strat_pool, pipe_num, opt=
"sbref",
"desc-preproc_bold",
"desc-stc_bold",
- "bold",
+ "desc-reorient_bold",
"from-bold_to-template_mode-image_xfm",
"ants-blip-warp",
"fsl-blip-warp",
)
],
- outputs=["desc-preproc_bold", "desc-stc_bold", "bold"],
+ outputs=["desc-preproc_bold", "desc-reorient_bold", "desc-stc_bold"],
)
def apply_blip_to_timeseries_separately(wf, cfg, strat_pool, pipe_num, opt=None):
"""Apply blip to timeseries."""
@@ -3834,8 +3821,8 @@ def apply_blip_to_timeseries_separately(wf, cfg, strat_pool, pipe_num, opt=None)
node, out = strat_pool.get_data("desc-stc_bold")
out_label = "desc-stc_bold"
elif opt == "abcd":
- node, out = strat_pool.get_data("bold")
- out_label = "bold"
+ node, out = strat_pool.get_data("desc-reorient_bold")
+ out_label = "desc-reorient_bold"
wf.connect(node, out, apply_xfm, "inputspec.input_image")
@@ -4098,7 +4085,12 @@ def warp_timeseries_to_T1template_deriv(wf, cfg, strat_pool, pipe_num, opt=None)
option_key=["apply_transform", "using"],
option_val="abcd",
inputs=[
- ("desc-preproc_bold", "bold", "motion-basefile", "coordinate-transformation"),
+ (
+ "desc-preproc_bold",
+ "desc-reorient_bold",
+ "motion-basefile",
+ "coordinate-transformation",
+ ),
"from-T1w_to-template_mode-image_xfm",
"from-bold_to-T1w_mode-image_desc-linear_xfm",
"from-bold_to-template_mode-image_xfm",
@@ -4107,6 +4099,9 @@ def warp_timeseries_to_T1template_deriv(wf, cfg, strat_pool, pipe_num, opt=None)
"space-template_res-bold_desc-brain_T1w",
"space-template_desc-bold_mask",
"T1w-brain-template-funcreg",
+ "T1w-template-funcreg",
+ "space-template_desc-preproc_T1w",
+ "space-template_desc-brain_mask",
],
outputs={
"space-template_desc-preproc_bold": {"Template": "T1w-brain-template-funcreg"},
@@ -4163,8 +4158,27 @@ def warp_timeseries_to_T1template_abcd(wf, cfg, strat_pool, pipe_num, opt=None):
node, out = strat_pool.get_data("from-T1w_to-template_mode-image_xfm")
wf.connect(node, out, convert_func_to_standard_warp, "warp2")
- node, out = strat_pool.get_data("space-template_res-bold_desc-brain_T1w")
- wf.connect(node, out, convert_func_to_standard_warp, "reference")
+ from CPAC.func_preproc.func_preproc import (
+ anat_brain_mask_to_bold_res,
+ anat_brain_to_bold_res,
+ )
+
+ anat_brain_to_func_res = anat_brain_to_bold_res(wf, cfg, pipe_num)
+
+ node, out = strat_pool.get_data("space-template_desc-preproc_T1w")
+ wf.connect(
+ node, out, anat_brain_to_func_res, "inputspec.space-template_desc-preproc_T1w"
+ )
+
+ node, out = strat_pool.get_data("T1w-template-funcreg")
+ wf.connect(node, out, anat_brain_to_func_res, "inputspec.T1w-template-funcreg")
+
+ wf.connect(
+ anat_brain_to_func_res,
+ "outputspec.space-template_res-bold_desc-brain_T1w",
+ convert_func_to_standard_warp,
+ "reference",
+ )
# TODO add condition: if no gradient distortion
# https://github.com/DCAN-Labs/DCAN-HCP/blob/6466b78/fMRIVolume/GenericfMRIVolumeProcessingPipeline.sh#L283-L284
@@ -4176,7 +4190,7 @@ def warp_timeseries_to_T1template_abcd(wf, cfg, strat_pool, pipe_num, opt=None):
extract_func_roi.inputs.t_min = 0
extract_func_roi.inputs.t_size = 3
- node, out = strat_pool.get_data("bold")
+ node, out = strat_pool.get_data("desc-reorient_bold")
wf.connect(node, out, extract_func_roi, "in_file")
# fslmaths "$fMRIFolder"/"$NameOffMRI"_gdc_warp -mul 0 "$fMRIFolder"/"$NameOffMRI"_gdc_warp
@@ -4194,7 +4208,7 @@ def warp_timeseries_to_T1template_abcd(wf, cfg, strat_pool, pipe_num, opt=None):
split_func.inputs.dimension = "t"
- node, out = strat_pool.get_data("bold")
+ node, out = strat_pool.get_data("desc-reorient_bold")
wf.connect(node, out, split_func, "in_file")
### Loop starts! ###
@@ -4227,8 +4241,12 @@ def warp_timeseries_to_T1template_abcd(wf, cfg, strat_pool, pipe_num, opt=None):
convert_registration_warp.inputs.out_relwarp = True
convert_registration_warp.inputs.relwarp = True
- node, out = strat_pool.get_data("space-template_res-bold_desc-brain_T1w")
- wf.connect(node, out, convert_registration_warp, "reference")
+ wf.connect(
+ anat_brain_to_func_res,
+ "outputspec.space-template_res-bold_desc-brain_T1w",
+ convert_registration_warp,
+ "reference",
+ )
wf.connect(
convert_motion_distortion_warp, "out_file", convert_registration_warp, "warp1"
@@ -4265,8 +4283,12 @@ def warp_timeseries_to_T1template_abcd(wf, cfg, strat_pool, pipe_num, opt=None):
convert_registration_warp, "out_file", applywarp_func_to_standard, "field_file"
)
- node, out = strat_pool.get_data("space-template_res-bold_desc-brain_T1w")
- wf.connect(node, out, applywarp_func_to_standard, "ref_file")
+ wf.connect(
+ anat_brain_to_func_res,
+ "outputspec.space-template_res-bold_desc-brain_T1w",
+ applywarp_func_to_standard,
+ "ref_file",
+ )
# applywarp --rel --interp=nn --in=${WD}/prevols/vol${vnum}_mask.nii.gz --warp=${MotionMatrixFolder}/${MotionMatrixPrefix}${vnum}_all_warp.nii.gz --ref=${WD}/${T1wImageFile}.${FinalfMRIResolution} --out=${WD}/postvols/vol${vnum}_mask.nii.gz
applywarp_func_mask_to_standard = pe.MapNode(
@@ -4289,8 +4311,12 @@ def warp_timeseries_to_T1template_abcd(wf, cfg, strat_pool, pipe_num, opt=None):
"field_file",
)
- node, out = strat_pool.get_data("space-template_res-bold_desc-brain_T1w")
- wf.connect(node, out, applywarp_func_mask_to_standard, "ref_file")
+ wf.connect(
+ anat_brain_to_func_res,
+ "outputspec.space-template_res-bold_desc-brain_T1w",
+ applywarp_func_mask_to_standard,
+ "ref_file",
+ )
### Loop ends! ###
@@ -4337,8 +4363,12 @@ def warp_timeseries_to_T1template_abcd(wf, cfg, strat_pool, pipe_num, opt=None):
convert_dc_warp.inputs.out_relwarp = True
convert_dc_warp.inputs.relwarp = True
- node, out = strat_pool.get_data("space-template_res-bold_desc-brain_T1w")
- wf.connect(node, out, convert_dc_warp, "reference")
+ wf.connect(
+ anat_brain_to_func_res,
+ "outputspec.space-template_res-bold_desc-brain_T1w",
+ convert_dc_warp,
+ "reference",
+ )
wf.connect(multiply_func_roi_by_zero, "out_file", convert_dc_warp, "warp1")
@@ -4355,8 +4385,12 @@ def warp_timeseries_to_T1template_abcd(wf, cfg, strat_pool, pipe_num, opt=None):
node, out = strat_pool.get_data("motion-basefile")
wf.connect(node, out, applywarp_scout, "in_file")
- node, out = strat_pool.get_data("space-template_res-bold_desc-brain_T1w")
- wf.connect(node, out, applywarp_scout, "ref_file")
+ wf.connect(
+ anat_brain_to_func_res,
+ "outputspec.space-template_res-bold_desc-brain_T1w",
+ applywarp_scout,
+ "ref_file",
+ )
wf.connect(convert_dc_warp, "out_file", applywarp_scout, "field_file")
@@ -4364,8 +4398,31 @@ def warp_timeseries_to_T1template_abcd(wf, cfg, strat_pool, pipe_num, opt=None):
# fslmaths ${InputfMRI} -mas ${BrainMask} -mas ${InputfMRI}_mask -thr 0 -ing 10000 ${OutputfMRI} -odt float
merge_func_mask = pe.Node(util.Merge(2), name=f"merge_func_mask_{pipe_num}")
- node, out = strat_pool.get_data("space-template_desc-bold_mask")
- wf.connect(node, out, merge_func_mask, "in1")
+ anat_brain_mask_to_func_res = anat_brain_mask_to_bold_res(
+ wf_name="anat_brain_mask_to_bold_res", cfg=cfg, pipe_num=pipe_num
+ )
+
+ node, out = strat_pool.get_data("space-template_desc-brain_mask")
+ wf.connect(
+ node,
+ out,
+ anat_brain_mask_to_func_res,
+ "inputspec.space-template_desc-brain_mask",
+ )
+
+ wf.connect(
+ anat_brain_to_func_res,
+ "outputspec.space-template_res-bold_desc-brain_T1w",
+ anat_brain_mask_to_func_res,
+ "inputspec.space-template_desc-preproc_T1w",
+ )
+
+ wf.connect(
+ anat_brain_mask_to_func_res,
+ "outputspec.space-template_desc-bold_mask",
+ merge_func_mask,
+ "in1",
+ )
wf.connect(find_min_mask, "out_file", merge_func_mask, "in2")
@@ -4413,13 +4470,13 @@ def warp_timeseries_to_T1template_abcd(wf, cfg, strat_pool, pipe_num, opt=None):
option_val="dcan_nhp",
inputs=[
(
- ["desc-reorient_bold", "bold"],
+ ["desc-reorient_bold", "desc-preproc_bold"],
"coordinate-transformation",
"from-T1w_to-template_mode-image_warp",
"from-bold_to-T1w_mode-image_desc-linear_warp",
"T1w-template",
"space-template_desc-head_T1w",
- "space-template_desc-T1w_mask",
+ "space-template_desc-brain_mask",
"space-template_desc-T1wT2w_biasfield",
)
],
@@ -4480,7 +4537,7 @@ def warp_timeseries_to_T1template_dcan_nhp(wf, cfg, strat_pool, pipe_num, opt=No
"anatomical_registration"
]["registration"]["FSL-FNIRT"]["identity_matrix"]
- node, out = strat_pool.get_data("space-template_desc-T1w_mask")
+ node, out = strat_pool.get_data("space-template_desc-brain_mask")
wf.connect(node, out, applywarp_anat_mask_res, "in_file")
wf.connect(applywarp_anat_res, "out_file", applywarp_anat_mask_res, "ref_file")
@@ -4544,7 +4601,7 @@ def warp_timeseries_to_T1template_dcan_nhp(wf, cfg, strat_pool, pipe_num, opt=No
extract_func_roi.inputs.t_min = 0
extract_func_roi.inputs.t_size = 3
- node, out = strat_pool.get_data(["desc-reorient_bold", "bold"])
+ node, out = strat_pool.get_data(["desc-reorient_bold", "desc-preproc_bold"])
wf.connect(node, out, extract_func_roi, "in_file")
# fslmaths "$fMRIFolder"/"$NameOffMRI"_gdc_warp -mul 0 "$fMRIFolder"/"$NameOffMRI"_gdc_warp
@@ -4562,7 +4619,7 @@ def warp_timeseries_to_T1template_dcan_nhp(wf, cfg, strat_pool, pipe_num, opt=No
split_func.inputs.dimension = "t"
- node, out = strat_pool.get_data(["desc-reorient_bold", "bold"])
+ node, out = strat_pool.get_data(["desc-reorient_bold", "desc-preproc_bold"])
wf.connect(node, out, split_func, "in_file")
### Loop starts! ###
diff --git a/CPAC/registration/tests/mocks.py b/CPAC/registration/tests/mocks.py
index 18501c5a9a..4f35595abd 100644
--- a/CPAC/registration/tests/mocks.py
+++ b/CPAC/registration/tests/mocks.py
@@ -151,6 +151,7 @@ def configuration_strategy_mock(method="FSL"):
resampled_template.inputs.template = template
resampled_template.inputs.template_name = template_name
resampled_template.inputs.tag = tag
+ resampled_template.inputs.orientation = "RPI"
strat.update_resource_pool(
{template_name: (resampled_template, "resampled_template")}
diff --git a/CPAC/registration/tests/test_registration.py b/CPAC/registration/tests/test_registration.py
index 58741da445..d8e8228497 100755
--- a/CPAC/registration/tests/test_registration.py
+++ b/CPAC/registration/tests/test_registration.py
@@ -130,7 +130,7 @@ def test_registration_lesion():
anat_preproc.inputs.inputspec.anat = anat_file
- lesion_preproc = create_lesion_preproc(wf_name="lesion_preproc")
+ lesion_preproc = create_lesion_preproc(cfg, wf_name="lesion_preproc")
lesion_preproc.inputs.inputspec.lesion = lesion_file
diff --git a/CPAC/resources/configs/1.7-1.8-nesting-mappings.yml b/CPAC/resources/configs/1.7-1.8-nesting-mappings.yml
index dd83685bc1..65bcb95b9c 100644
--- a/CPAC/resources/configs/1.7-1.8-nesting-mappings.yml
+++ b/CPAC/resources/configs/1.7-1.8-nesting-mappings.yml
@@ -251,11 +251,6 @@ bet_frac:
- brain_extraction
- FSL-BET
- frac
-bet_mask_boolean:
- - anatomical_preproc
- - brain_extraction
- - FSL-BET
- - mask_boolean
bet_mesh_boolean:
- anatomical_preproc
- brain_extraction
diff --git a/CPAC/resources/configs/pipeline_config_abcd-options.yml b/CPAC/resources/configs/pipeline_config_abcd-options.yml
index 1cb360cdc9..cd5c14ad42 100644
--- a/CPAC/resources/configs/pipeline_config_abcd-options.yml
+++ b/CPAC/resources/configs/pipeline_config_abcd-options.yml
@@ -13,6 +13,10 @@ pipeline_setup:
# Name for this pipeline configuration - useful for identification.
# This string will be sanitized and used in filepaths
pipeline_name: cpac_abcd-options
+ output_directory:
+ quality_control:
+ # Generate eXtensible Connectivity Pipeline-style quality control files
+ generate_xcpqc_files: On
system_config:
# The maximum amount of memory each participant's workflow can allocate.
@@ -32,6 +36,12 @@ surface_analysis:
abcd_prefreesurfer_prep:
run: On
+ # Reference mask with 2mm resolution to be used during FNIRT-based brain extraction in ABCD-options pipeline.
+ ref_mask_res-2: /opt/dcan-tools/pipeline/global/templates/MNI152_T1_2mm_brain_mask_dil.nii.gz
+
+ # Template with 2mm resolution to be used during FNIRT-based brain extraction in ABCD-options pipeline.
+ T1w_template_res-2: /opt/dcan-tools/pipeline/global/templates/MNI152_T1_2mm.nii.gz
+
# Will run Freesurfer for surface-based analysis. Will output traditional Freesurfer derivatives.
# If you wish to employ Freesurfer outputs for brain masking or tissue segmentation in the voxel-based pipeline,
# select those 'Freesurfer-' labeled options further below in anatomical_preproc.
@@ -74,6 +84,9 @@ anatomical_preproc:
# this is a fork option
using: [FreeSurfer-ABCD]
+ restore_t1w_intensity:
+ run: On
+
# Non-local means filtering via ANTs DenoiseImage
non_local_means_filtering:
@@ -102,13 +115,6 @@ registration_workflows:
anatomical_registration:
run: On
registration:
- FSL-FNIRT:
-
- # Reference mask with 2mm resolution to be used during FNIRT-based brain extraction in ABCD-options pipeline.
- ref_mask_res-2: /opt/dcan-tools/pipeline/global/templates/MNI152_T1_2mm_brain_mask_dil.nii.gz
-
- # Template with 2mm resolution to be used during FNIRT-based brain extraction in ABCD-options pipeline.
- T1w_template_res-2: /opt/dcan-tools/pipeline/global/templates/MNI152_T1_2mm.nii.gz
# option parameters
ANTs:
@@ -200,17 +206,10 @@ registration_workflows:
run: On
func_input_prep:
- # Choose whether to use functional brain or skull as the input to functional-to-anatomical registration
- reg_with_skull: On
-
# Choose whether to use the mean of the functional/EPI as the input to functional-to-anatomical registration or one of the volumes from the functional 4D timeseries that you choose.
# input: ['Mean_Functional', 'Selected_Functional_Volume', 'fmriprep_reference']
input: [Selected_Functional_Volume]
- # reference: 'brain' or 'restore-brain'
- # In ABCD-options pipeline, 'restore-brain' is used as coregistration reference
- reference: restore-brain
-
# Choose coregistration interpolation
interpolation: spline
diff --git a/CPAC/resources/configs/pipeline_config_abcd-prep.yml b/CPAC/resources/configs/pipeline_config_abcd-prep.yml
index d6542ea358..22ec01b021 100644
--- a/CPAC/resources/configs/pipeline_config_abcd-prep.yml
+++ b/CPAC/resources/configs/pipeline_config_abcd-prep.yml
@@ -13,6 +13,10 @@ pipeline_setup:
# Name for this pipeline configuration - useful for identification.
# This string will be sanitized and used in filepaths
pipeline_name: cpac_abcd-prep
+ output_directory:
+ quality_control:
+ # Generate eXtensible Connectivity Pipeline-style quality control files
+ generate_xcpqc_files: On
system_config:
# The maximum amount of memory each participant's workflow can allocate.
@@ -32,6 +36,12 @@ surface_analysis:
abcd_prefreesurfer_prep:
run: On
+ # Reference mask with 2mm resolution to be used during FNIRT-based brain extraction in ABCD-options pipeline.
+ ref_mask_res-2: /opt/dcan-tools/pipeline/global/templates/MNI152_T1_2mm_brain_mask_dil.nii.gz
+
+ # Template with 2mm resolution to be used during FNIRT-based brain extraction in ABCD-options pipeline.
+ T1w_template_res-2: /opt/dcan-tools/pipeline/global/templates/MNI152_T1_2mm.nii.gz
+
anatomical_preproc:
run: On
acpc_alignment:
@@ -72,13 +82,6 @@ anatomical_preproc:
registration_workflows:
anatomical_registration:
registration:
- FSL-FNIRT:
-
- # Reference mask with 2mm resolution to be used during FNIRT-based brain extraction in ABCD-options pipeline.
- ref_mask_res-2: /opt/dcan-tools/pipeline/global/templates/MNI152_T1_2mm_brain_mask_dil.nii.gz
-
- # Template with 2mm resolution to be used during FNIRT-based brain extraction in ABCD-options pipeline.
- T1w_template_res-2: /opt/dcan-tools/pipeline/global/templates/MNI152_T1_2mm.nii.gz
# option parameters
ANTs:
@@ -168,10 +171,6 @@ registration_workflows:
# input: ['Mean_Functional', 'Selected_Functional_Volume', 'fmriprep_reference']
input: [Selected_Functional_Volume]
- # reference: 'brain' or 'restore-brain'
- # In ABCD-options pipeline, 'restore-brain' is used as coregistration reference
- reference: restore-brain
-
# Choose coregistration interpolation
interpolation: spline
diff --git a/CPAC/resources/configs/pipeline_config_benchmark-ANTS.yml b/CPAC/resources/configs/pipeline_config_benchmark-ANTS.yml
index af356132a9..0bb3fd15ac 100644
--- a/CPAC/resources/configs/pipeline_config_benchmark-ANTS.yml
+++ b/CPAC/resources/configs/pipeline_config_benchmark-ANTS.yml
@@ -21,6 +21,9 @@ pipeline_setup:
# Generate quality control pages containing preprocessing and derivative outputs.
generate_quality_control_images: On
+ # Generate eXtensible Connectivity Pipeline-style quality control files
+ generate_xcpqc_files: On
+
# Include extra versions and intermediate steps of functional preprocessing in the output directory.
write_func_outputs: On
diff --git a/CPAC/resources/configs/pipeline_config_benchmark-FNIRT.yml b/CPAC/resources/configs/pipeline_config_benchmark-FNIRT.yml
index 63e8fc0c92..04bc116581 100644
--- a/CPAC/resources/configs/pipeline_config_benchmark-FNIRT.yml
+++ b/CPAC/resources/configs/pipeline_config_benchmark-FNIRT.yml
@@ -21,6 +21,9 @@ pipeline_setup:
# Generate quality control pages containing preprocessing and derivative outputs.
generate_quality_control_images: On
+ # Generate eXtensible Connectivity Pipeline-style quality control files
+ generate_xcpqc_files: On
+
# Include extra versions and intermediate steps of functional preprocessing in the output directory.
write_func_outputs: On
diff --git a/CPAC/resources/configs/pipeline_config_blank.yml b/CPAC/resources/configs/pipeline_config_blank.yml
index 7f09680fc6..66f34fe41c 100644
--- a/CPAC/resources/configs/pipeline_config_blank.yml
+++ b/CPAC/resources/configs/pipeline_config_blank.yml
@@ -11,6 +11,9 @@ pipeline_setup:
# Name for this pipeline configuration - useful for identification.
# This string will be sanitized and used in filepaths
pipeline_name: cpac-blank-template
+
+ # Desired orientation for the output data. "RPI", "LPI", "RAI", "LAI", "RAS", "LAS", "RPS", "LPS"
+ desired_orientation: RPI
output_directory:
# Quality control outputs
@@ -205,6 +208,12 @@ surface_analysis:
abcd_prefreesurfer_prep:
run: Off
+ # Reference mask with 2mm resolution to be used during FNIRT-based brain extraction in ABCD-options pipeline.
+ ref_mask_res-2: $FSLDIR/data/standard/MNI152_T1_2mm_brain_mask_dil.nii.gz
+
+ # Template with 2mm resolution to be used during FNIRT-based brain extraction in ABCD-options pipeline.
+ T1w_template_res-2: $FSLDIR/data/standard/MNI152_T1_2mm.nii.gz
+
# Will run Freesurfer for surface-based analysis. Will output traditional Freesurfer derivatives.
# If you wish to employ Freesurfer outputs for brain masking or tissue segmentation in the voxel-based pipeline,
# select those 'Freesurfer-' labeled options further below in anatomical_preproc.
@@ -410,6 +419,9 @@ anatomical_preproc:
# niworkflows-ants registration mask (can be optional)
regmask_path: /ants_template/oasis/T_template0_BrainCerebellumRegistrationMask.nii.gz
+ restore_t1w_intensity:
+ run: Off
+
run_t2: Off
# Bias field correction based on square root of T1w * T2w
@@ -576,12 +588,6 @@ registration_workflows:
# It is for monkey pipeline specifically.
FNIRT_T1w_template:
- # Reference mask with 2mm resolution to be used during FNIRT-based brain extraction in ABCD-options pipeline.
- ref_mask_res-2: $FSLDIR/data/standard/MNI152_T1_2mm_brain_mask_dil.nii.gz
-
- # Template with 2mm resolution to be used during FNIRT-based brain extraction in ABCD-options pipeline.
- T1w_template_res-2: $FSLDIR/data/standard/MNI152_T1_2mm.nii.gz
-
# Configuration file to be used by FSL to set FNIRT parameters.
# It is not necessary to change this path unless you intend to use custom FNIRT parameters or a non-standard template.
fnirt_config: T1_2_MNI152_2mm
@@ -702,9 +708,6 @@ registration_workflows:
run: Off
func_input_prep:
- # Choose whether to use functional brain or skull as the input to functional-to-anatomical registration
- reg_with_skull: Off
-
# Choose whether to use the mean of the functional/EPI as the input to functional-to-anatomical registration or one of the volumes from the functional 4D timeseries that you choose.
# input: ['Mean_Functional', 'Selected_Functional_Volume', 'fmriprep_reference']
input: [Mean_Functional]
@@ -720,6 +723,9 @@ registration_workflows:
#Input the index of which volume from the functional 4D timeseries input file you wish to use as the input for functional-to-anatomical registration.
func_reg_input_volume: 0
+ # Mask the sbref created by coregistration input prep nodeblocks above before registration
+ mask_sbref: On
+
boundary_based_registration:
# this is a fork point
@@ -741,16 +747,9 @@ registration_workflows:
# It is not necessary to change this path unless you intend to use non-standard MNI registration.
bbr_schedule: $FSLDIR/etc/flirtsch/bbr.sch
- # reference: 'brain' or 'restore-brain'
- # In ABCD-options pipeline, 'restore-brain' is used as coregistration reference
- reference: brain
-
# Choose FSL or ABCD as coregistration method
using: FSL
- # Choose brain or whole-head as coregistration input
- input: brain
-
# Choose coregistration interpolation
interpolation: trilinear
diff --git a/CPAC/resources/configs/pipeline_config_ccs-options.yml b/CPAC/resources/configs/pipeline_config_ccs-options.yml
index f73cedec84..891a800837 100644
--- a/CPAC/resources/configs/pipeline_config_ccs-options.yml
+++ b/CPAC/resources/configs/pipeline_config_ccs-options.yml
@@ -21,6 +21,9 @@ pipeline_setup:
# Generate quality control pages containing preprocessing and derivative outputs.
generate_quality_control_images: On
+ # Generate eXtensible Connectivity Pipeline-style quality control files
+ generate_xcpqc_files: On
+
# Include extra versions and intermediate steps of functional preprocessing in the output directory.
write_func_outputs: On
diff --git a/CPAC/resources/configs/pipeline_config_default-deprecated.yml b/CPAC/resources/configs/pipeline_config_default-deprecated.yml
index cc768ce714..f774f8e479 100644
--- a/CPAC/resources/configs/pipeline_config_default-deprecated.yml
+++ b/CPAC/resources/configs/pipeline_config_default-deprecated.yml
@@ -21,6 +21,9 @@ pipeline_setup:
# Generate quality control pages containing preprocessing and derivative outputs.
generate_quality_control_images: On
+ # Generate eXtensible Connectivity Pipeline-style quality control files
+ generate_xcpqc_files: On
+
anatomical_preproc:
run: On
acpc_alignment:
diff --git a/CPAC/resources/configs/pipeline_config_default.yml b/CPAC/resources/configs/pipeline_config_default.yml
index b7aa56c13f..5c22d2ee86 100644
--- a/CPAC/resources/configs/pipeline_config_default.yml
+++ b/CPAC/resources/configs/pipeline_config_default.yml
@@ -13,6 +13,9 @@ pipeline_setup:
# This string will be sanitized and used in filepaths
pipeline_name: cpac-default-pipeline
+ # Desired orientation for the output data. "RPI", "LPI", "RAI", "LAI", "RAS", "LAS", "RPS", "LPS"
+ desired_orientation: RPI
+
output_directory:
# Directory where C-PAC should write out processed data, logs, and crash reports.
@@ -51,10 +54,10 @@ pipeline_setup:
# Quality control outputs
quality_control:
# Generate quality control pages containing preprocessing and derivative outputs.
- generate_quality_control_images: True
+ generate_quality_control_images: On
# Generate eXtensible Connectivity Pipeline-style quality control files
- generate_xcpqc_files: False
+ generate_xcpqc_files: On
working_directory:
@@ -197,6 +200,12 @@ surface_analysis:
abcd_prefreesurfer_prep:
run: Off
+ # Reference mask with 2mm resolution to be used during FNIRT-based brain extraction in ABCD-options pipeline.
+ ref_mask_res-2: $FSLDIR/data/standard/MNI152_T1_2mm_brain_mask_dil.nii.gz
+
+ # Template with 2mm resolution to be used during FNIRT-based brain extraction in ABCD-options pipeline.
+ T1w_template_res-2: $FSLDIR/data/standard/MNI152_T1_2mm.nii.gz
+
# Will run Freesurfer for surface-based analysis. Will output traditional Freesurfer derivatives.
# If you wish to employ Freesurfer outputs for brain masking or tissue segmentation in the voxel-based pipeline,
# select those 'Freesurfer-' labeled options further below in anatomical_preproc.
@@ -475,6 +484,9 @@ anatomical_preproc:
# Template to be used for FreeSurfer-BET brain extraction in CCS-options pipeline
T1w_brain_template_mask_ccs: /ccs_template/MNI152_T1_1mm_first_brain_mask.nii.gz
+ restore_t1w_intensity:
+ run: Off
+
segmentation:
@@ -738,12 +750,6 @@ registration_workflows:
# It is not necessary to change this path unless you intend to use a different template.
identity_matrix: $FSLDIR/etc/flirtsch/ident.mat
- # Reference mask with 2mm resolution to be used during FNIRT-based brain extraction in ABCD-options pipeline.
- ref_mask_res-2: $FSLDIR/data/standard/MNI152_T1_2mm_brain_mask_dil.nii.gz
-
- # Template with 2mm resolution to be used during FNIRT-based brain extraction in ABCD-options pipeline.
- T1w_template_res-2: $FSLDIR/data/standard/MNI152_T1_2mm.nii.gz
-
overwrite_transform:
run: Off
@@ -759,16 +765,9 @@ registration_workflows:
run: On
- # reference: 'brain' or 'restore-brain'
- # In ABCD-options pipeline, 'restore-brain' is used as coregistration reference
- reference: brain
-
# Choose FSL or ABCD as coregistration method
using: FSL
- # Choose brain or whole-head as coregistration input
- input: brain
-
# Choose coregistration interpolation
interpolation: trilinear
@@ -783,9 +782,6 @@ registration_workflows:
func_input_prep:
- # Choose whether to use functional brain or skull as the input to functional-to-anatomical registration
- reg_with_skull: Off
-
# Choose whether to use the mean of the functional/EPI as the input to functional-to-anatomical registration or one of the volumes from the functional 4D timeseries that you choose.
# input: ['Mean_Functional', 'Selected_Functional_Volume', 'fmriprep_reference']
input: ['Mean_Functional']
@@ -802,6 +798,9 @@ registration_workflows:
#Input the index of which volume from the functional 4D timeseries input file you wish to use as the input for functional-to-anatomical registration.
func_reg_input_volume: 0
+ # Mask the sbref created by coregistration input prep nodeblocks above before registration
+ mask_sbref: On
+
boundary_based_registration:
# this is a fork point
# run: [On, Off] - this will run both and fork the pipeline
diff --git a/CPAC/resources/configs/pipeline_config_fmriprep-ingress.yml b/CPAC/resources/configs/pipeline_config_fmriprep-ingress.yml
index da6b97142f..1f6cf5e1ef 100644
--- a/CPAC/resources/configs/pipeline_config_fmriprep-ingress.yml
+++ b/CPAC/resources/configs/pipeline_config_fmriprep-ingress.yml
@@ -21,6 +21,9 @@ pipeline_setup:
# Generate quality control pages containing preprocessing and derivative outputs.
generate_quality_control_images: On
+ # Generate eXtensible Connectivity Pipeline-style quality control files
+ generate_xcpqc_files: On
+
outdir_ingress:
run: On
Template: MNI152NLin2009cAsym
diff --git a/CPAC/resources/configs/pipeline_config_fmriprep-options.yml b/CPAC/resources/configs/pipeline_config_fmriprep-options.yml
index 555b52302d..842f371257 100644
--- a/CPAC/resources/configs/pipeline_config_fmriprep-options.yml
+++ b/CPAC/resources/configs/pipeline_config_fmriprep-options.yml
@@ -13,6 +13,10 @@ pipeline_setup:
# Name for this pipeline configuration - useful for identification.
# This string will be sanitized and used in filepaths
pipeline_name: cpac_fmriprep-options
+ output_directory:
+ quality_control:
+ # Generate eXtensible Connectivity Pipeline-style quality control files
+ generate_xcpqc_files: On
system_config:
# Select Off if you intend to run CPAC on a single machine.
@@ -151,12 +155,6 @@ registration_workflows:
registration:
FSL-FNIRT:
- # Reference mask with 2mm resolution to be used during FNIRT-based brain extraction in ABCD-options pipeline.
- ref_mask_res-2:
-
- # Template with 2mm resolution to be used during FNIRT-based brain extraction in ABCD-options pipeline.
- T1w_template_res-2:
-
# Reference mask for FSL registration.
ref_mask:
diff --git a/CPAC/resources/configs/pipeline_config_monkey-ABCD.yml b/CPAC/resources/configs/pipeline_config_monkey-ABCD.yml
index e1fb1e8e66..9289e85966 100644
--- a/CPAC/resources/configs/pipeline_config_monkey-ABCD.yml
+++ b/CPAC/resources/configs/pipeline_config_monkey-ABCD.yml
@@ -21,6 +21,9 @@ pipeline_setup:
# Generate quality control pages containing preprocessing and derivative outputs.
generate_quality_control_images: On
+ # Generate eXtensible Connectivity Pipeline-style quality control files
+ generate_xcpqc_files: On
+
system_config:
# Select Off if you intend to run CPAC on a single machine.
diff --git a/CPAC/resources/configs/pipeline_config_monkey.yml b/CPAC/resources/configs/pipeline_config_monkey.yml
index 17b1396759..4caef0c006 100644
--- a/CPAC/resources/configs/pipeline_config_monkey.yml
+++ b/CPAC/resources/configs/pipeline_config_monkey.yml
@@ -21,6 +21,9 @@ pipeline_setup:
# Generate quality control pages containing preprocessing and derivative outputs.
generate_quality_control_images: On
+ # Generate eXtensible Connectivity Pipeline-style quality control files
+ generate_xcpqc_files: On
+
system_config:
# Select Off if you intend to run CPAC on a single machine.
diff --git a/CPAC/resources/configs/pipeline_config_ndmg.yml b/CPAC/resources/configs/pipeline_config_ndmg.yml
index af183e82c1..02cd19b673 100644
--- a/CPAC/resources/configs/pipeline_config_ndmg.yml
+++ b/CPAC/resources/configs/pipeline_config_ndmg.yml
@@ -21,6 +21,9 @@ pipeline_setup:
# Generate quality control pages containing preprocessing and derivative outputs.
generate_quality_control_images: On
+ # Generate eXtensible Connectivity Pipeline-style quality control files
+ generate_xcpqc_files: On
+
system_config:
# The number of cores to allocate to ANTS-based anatomical registration per participant.
diff --git a/CPAC/resources/configs/pipeline_config_regtest-1.yml b/CPAC/resources/configs/pipeline_config_regtest-1.yml
index 22b0506092..7e61db6b8c 100644
--- a/CPAC/resources/configs/pipeline_config_regtest-1.yml
+++ b/CPAC/resources/configs/pipeline_config_regtest-1.yml
@@ -21,6 +21,9 @@ pipeline_setup:
# Generate quality control pages containing preprocessing and derivative outputs.
generate_quality_control_images: On
+ # Generate eXtensible Connectivity Pipeline-style quality control files
+ generate_xcpqc_files: On
+
# Include extra versions and intermediate steps of functional preprocessing in the output directory.
write_func_outputs: On
diff --git a/CPAC/resources/configs/pipeline_config_regtest-2.yml b/CPAC/resources/configs/pipeline_config_regtest-2.yml
index 574f9a6f4c..0ba3b198aa 100644
--- a/CPAC/resources/configs/pipeline_config_regtest-2.yml
+++ b/CPAC/resources/configs/pipeline_config_regtest-2.yml
@@ -21,6 +21,9 @@ pipeline_setup:
# Generate quality control pages containing preprocessing and derivative outputs.
generate_quality_control_images: On
+ # Generate eXtensible Connectivity Pipeline-style quality control files
+ generate_xcpqc_files: On
+
# Include extra versions and intermediate steps of functional preprocessing in the output directory.
write_func_outputs: On
diff --git a/CPAC/resources/configs/pipeline_config_regtest-3.yml b/CPAC/resources/configs/pipeline_config_regtest-3.yml
index 876e14cc58..d9a2cd679e 100644
--- a/CPAC/resources/configs/pipeline_config_regtest-3.yml
+++ b/CPAC/resources/configs/pipeline_config_regtest-3.yml
@@ -21,6 +21,9 @@ pipeline_setup:
# Generate quality control pages containing preprocessing and derivative outputs.
generate_quality_control_images: On
+ # Generate eXtensible Connectivity Pipeline-style quality control files
+ generate_xcpqc_files: On
+
# Include extra versions and intermediate steps of functional preprocessing in the output directory.
write_func_outputs: On
diff --git a/CPAC/resources/configs/pipeline_config_regtest-4.yml b/CPAC/resources/configs/pipeline_config_regtest-4.yml
index 534a5cf6b7..b33af48a33 100644
--- a/CPAC/resources/configs/pipeline_config_regtest-4.yml
+++ b/CPAC/resources/configs/pipeline_config_regtest-4.yml
@@ -21,6 +21,9 @@ pipeline_setup:
# Generate quality control pages containing preprocessing and derivative outputs.
generate_quality_control_images: On
+ # Generate eXtensible Connectivity Pipeline-style quality control files
+ generate_xcpqc_files: On
+
# Include extra versions and intermediate steps of functional preprocessing in the output directory.
write_func_outputs: On
diff --git a/CPAC/resources/configs/pipeline_config_rodent.yml b/CPAC/resources/configs/pipeline_config_rodent.yml
index a066241431..95bc06b9b8 100644
--- a/CPAC/resources/configs/pipeline_config_rodent.yml
+++ b/CPAC/resources/configs/pipeline_config_rodent.yml
@@ -21,6 +21,9 @@ pipeline_setup:
# Generate quality control pages containing preprocessing and derivative outputs.
generate_quality_control_images: On
+ # Generate eXtensible Connectivity Pipeline-style quality control files
+ generate_xcpqc_files: On
+
system_config:
# The maximum amount of memory each participant's workflow can allocate.
diff --git a/CPAC/resources/cpac_outputs.tsv b/CPAC/resources/cpac_outputs.tsv
index 873defbbff..8fe4cd284f 100644
--- a/CPAC/resources/cpac_outputs.tsv
+++ b/CPAC/resources/cpac_outputs.tsv
@@ -166,7 +166,6 @@ desc-restore-brain_T1w T1w T1w anat NIfTI
space-template_desc-brain_T1w T1w template anat NIfTI Yes
space-template_desc-preproc_T1w T1w template anat NIfTI
space-template_desc-head_T1w T1w template anat NIfTI
-space-template_desc-T1w_mask mask template anat NIfTI
space-template_desc-Mean_timeseries timeseries func 1D
desc-MeanSCA_timeseries timeseries func 1D
desc-SpatReg_timeseries timeseries func 1D
diff --git a/CPAC/resources/cpac_templates.csv b/CPAC/resources/cpac_templates.csv
index 5c2abc9947..cf4cad758f 100644
--- a/CPAC/resources/cpac_templates.csv
+++ b/CPAC/resources/cpac_templates.csv
@@ -31,8 +31,8 @@ T1w-template-symmetric,"voxel_mirrored_homotopic_connectivity, symmetric_registr
T1w-template-symmetric-deriv,"voxel_mirrored_homotopic_connectivity, symmetric_registration, T1w_template_symmetric_funcreg","Symmetric version of the T1w-based whole-head template, resampled to the desired functional derivative resolution","registration_workflows, functional_registration, func_registration_to_template, output_resolution, func_derivative_outputs"
T1w-template-symmetric-for-resample,"voxel_mirrored_homotopic_connectivity, symmetric_registration, T1w_template_symmetric_for_resample",,
template-ref-mask,"registration_workflows, anatomical_registration, registration, FSL-FNIRT, ref_mask",,"registration_workflows, anatomical_registration, resolution_for_anat"
-template-ref-mask-res-2,"registration_workflows, anatomical_registration, registration, FSL-FNIRT, ref_mask_res-2",,
-T1w-template-res-2,"registration_workflows, anatomical_registration, registration, FSL-FNIRT, T1w_template_res-2",,
+template-ref-mask-res-2,"surface_analysis, abcd_prefreesurfer_prep, ref_mask_res-2",,
+T1w-template-res-2,"surface_analysis, abcd_prefreesurfer_prep, T1w_template_res-2",,
template-specification-file,"network_centrality, template_specification_file",Binary ROI mask for network centrality calculations,
unet-model,"anatomical_preproc, brain_extraction, UNet, unet_model",,
WM-path,"segmentation, tissue_segmentation, FSL-FAST, use_priors, WM_path",Template-space WM tissue prior,
diff --git a/CPAC/resources/tests/test_templates.py b/CPAC/resources/tests/test_templates.py
index 13a4f72745..048cbe9b1c 100644
--- a/CPAC/resources/tests/test_templates.py
+++ b/CPAC/resources/tests/test_templates.py
@@ -19,6 +19,7 @@
import os
import pytest
+import nipype.pipeline.engine as pe
from CPAC.pipeline import ALL_PIPELINE_CONFIGS
from CPAC.pipeline.engine import ingress_pipeconfig_paths, ResourcePool
@@ -29,11 +30,11 @@
@pytest.mark.parametrize("pipeline", ALL_PIPELINE_CONFIGS)
def test_packaged_path_exists(pipeline):
"""
- Check that all local templates are included in image at at
- least one resolution.
+ Check that all local templates are included in image at at least one resolution.
"""
- rpool = ingress_pipeconfig_paths(
- Preconfiguration(pipeline), ResourcePool(), "pytest"
+ wf = pe.Workflow(name="test")
+ wf, rpool = ingress_pipeconfig_paths(
+ wf, Preconfiguration(pipeline), ResourcePool(), "pytest"
)
for resource in rpool.rpool.values():
node = next(iter(resource.values())).get("data")[0]
diff --git a/CPAC/surface/surf_preproc.py b/CPAC/surface/surf_preproc.py
index 1defe4e2d1..017ce4d604 100644
--- a/CPAC/surface/surf_preproc.py
+++ b/CPAC/surface/surf_preproc.py
@@ -928,7 +928,7 @@ def run_surface(
[
"space-template_desc-head_T1w",
"space-template_desc-brain_T1w",
- "space-template_desc-T1w_mask",
+ "space-template_desc-brain_mask",
],
[
"from-T1w_to-template_mode-image_xfm",
@@ -1202,7 +1202,7 @@ def surface_postproc(wf, cfg, strat_pool, pipe_num, opt=None):
space_temp = [
"space-template_desc-head_T1w",
"space-template_desc-brain_T1w",
- "space-template_desc-T1w_mask",
+ "space-template_desc-brain_mask",
]
atlas_xfm = [
"from-T1w_to-template_mode-image_xfm",
diff --git a/CPAC/utils/bids_utils.py b/CPAC/utils/bids_utils.py
index 34e72d430e..4ed9e45e7c 100755
--- a/CPAC/utils/bids_utils.py
+++ b/CPAC/utils/bids_utils.py
@@ -14,10 +14,13 @@
# You should have received a copy of the GNU Lesser General Public
# License along with C-PAC. If not, see <https://www.gnu.org/licenses/>.
+from base64 import b64decode
+from collections.abc import Iterable
import json
import os
import re
import sys
+from typing import Any, Callable, Optional
from warnings import warn
from botocore.exceptions import BotoCoreError
@@ -26,6 +29,16 @@
from CPAC.utils.monitoring import UTLOGGER
+class SpecifiedBotoCoreError(BotoCoreError):
+ """Specified :py:class:`~botocore.exceptions.BotoCoreError`."""
+
+ def __init__(self, msg: str, *args, **kwargs) -> None:
+ """Initialize BotoCoreError with message."""
+ msg = msg.format(**kwargs)
+ Exception.__init__(self, msg)
+ self.kwargs = kwargs
+
+
def bids_decode_fname(file_path, dbg=False, raise_error=True):
f_dict = {}
@@ -842,7 +855,7 @@ def collect_bids_files_configs(bids_dir, aws_input_creds=""):
f"Error retrieving {s3_obj.key.replace(prefix, '')}"
f" ({e.message})"
)
- raise BotoCoreError(msg) from e
+ raise SpecifiedBotoCoreError(msg) from e
elif "nii" in str(s3_obj.key):
file_paths.append(
str(s3_obj.key).replace(prefix, "").lstrip("/")
@@ -868,9 +881,15 @@ def collect_bids_files_configs(bids_dir, aws_input_creds=""):
): json.load(open(os.path.join(root, f), "r"))
}
)
- except UnicodeDecodeError:
+ except UnicodeDecodeError as unicode_decode_error:
msg = f"Could not decode {os.path.join(root, f)}"
- raise UnicodeDecodeError(msg)
+ raise UnicodeDecodeError(
+ unicode_decode_error.encoding,
+ unicode_decode_error.object,
+ unicode_decode_error.start,
+ unicode_decode_error.end,
+ msg,
+ )
if not file_paths and not config_dict:
msg = (
@@ -983,15 +1002,35 @@ def insert_entity(resource, key, value):
return "_".join([*new_entities[0], f"{key}-{value}", *new_entities[1], suff])
-def load_yaml_config(config_filename, aws_input_creds):
+def apply_modifications(
+ yaml_contents: str, modifications: Optional[list[Callable[[str], str]]]
+) -> str:
+ """Apply modification functions to YAML contents"""
+ if modifications:
+ for modification in modifications:
+ yaml_contents = modification(yaml_contents)
+ return yaml_contents
+
+
+def load_yaml_config(
+ config_filename: str,
+ aws_input_creds,
+ modifications: Optional[list[Callable[[str], str]]] = None,
+) -> dict | list | str:
+ """Load a YAML config file, possibly from AWS, with modifications applied.
+
+ `modifications` should be a list of functions that take a single string (the loaded YAML contents) and return a single string (the modified YAML contents).
+ """
if config_filename.lower().startswith("data:"):
try:
- header, encoded = config_filename.split(",", 1)
- config_content = b64decode(encoded)
+ _header, encoded = config_filename.split(",", 1)
+ config_content = apply_modifications(
+ b64decode(encoded).decode("utf-8"), modifications
+ )
return yaml.safe_load(config_content)
- except:
+ except Exception:
msg = f"Error! Could not find load config from data URI {config_filename}"
- raise BotoCoreError(msg)
+ raise SpecifiedBotoCoreError(msg=msg)
if config_filename.lower().startswith("s3://"):
# s3 paths begin with s3://bucket/
@@ -1013,7 +1052,8 @@ def load_yaml_config(config_filename, aws_input_creds):
config_filename = os.path.realpath(config_filename)
try:
- return yaml.safe_load(open(config_filename, "r"))
+ with open(config_filename, "r") as _f:
+ return yaml.safe_load(apply_modifications(_f.read(), modifications))
except IOError:
msg = f"Error! Could not find config file {config_filename}"
raise FileNotFoundError(msg)
@@ -1110,6 +1150,25 @@ def create_cpac_data_config(
return sub_list
+def _check_value_type(
+ sub_list: list[dict[str, Any]],
+ keys: list[str] = ["subject_id", "unique_id"],
+ value_type: type = int,
+ any_or_all: Callable[[Iterable], bool] = any,
+) -> bool:
+ """Check if any or all of a key in a sub_list is of a given type."""
+ return any_or_all(
+ isinstance(sub.get(key), value_type) for key in keys for sub in sub_list
+ )
+
+
+def coerce_data_config_strings(contents: str) -> str:
+ """Coerge `subject_id` and `unique_id` to be strings."""
+ for key in ["subject_id: ", "unique_id: "]:
+ contents = re.sub(f"{key}(?!!!)", f"{key}!!str ", contents)
+ return contents.replace(": !!str !!", ": !!")
+
+
def load_cpac_data_config(data_config_file, participant_labels, aws_input_creds):
"""
Loads the file as a check to make sure it is available and readable.
@@ -1127,7 +1186,9 @@ def load_cpac_data_config(data_config_file, participant_labels, aws_input_creds)
-------
list
"""
- sub_list = load_yaml_config(data_config_file, aws_input_creds)
+ sub_list: list[dict[str, str]] = load_yaml_config(
+ data_config_file, aws_input_creds, modifications=[coerce_data_config_strings]
+ )
if participant_labels:
sub_list = [
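For context, the `coerce_data_config_strings` modification exists because PyYAML parses purely numeric participant and session IDs as integers, which later breaks string operations on `subject_id`/`unique_id`. A rough standalone sketch of the same idea (re-implemented here for illustration; only the regex mirrors the change above):

import re
import yaml

def coerce_ids_to_str(contents: str) -> str:
    """Tag subject_id/unique_id values as YAML strings unless already tagged."""
    for key in ["subject_id: ", "unique_id: "]:
        contents = re.sub(f"{key}(?!!!)", f"{key}!!str ", contents)
    return contents.replace(": !!str !!", ": !!")

raw = "subject_id: 25428\nunique_id: 1"
yaml.safe_load(raw)                      # {'subject_id': 25428, 'unique_id': 1} -- ints
yaml.safe_load(coerce_ids_to_str(raw))   # {'subject_id': '25428', 'unique_id': '1'} -- strings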
diff --git a/CPAC/utils/build_data_config.py b/CPAC/utils/build_data_config.py
index 8be6c6b234..e17cf2ed30 100644
--- a/CPAC/utils/build_data_config.py
+++ b/CPAC/utils/build_data_config.py
@@ -16,14 +16,12 @@
# License along with C-PAC. If not, see <https://www.gnu.org/licenses/>.
"""Build a C-PAC data configuration."""
-from logging import basicConfig, INFO
from pathlib import Path
from typing import Any
from CPAC.utils.monitoring.custom_logging import getLogger
logger = getLogger("CPAC.utils.data-config")
-basicConfig(format="%(message)s", level=INFO)
def _cannot_write(file_name: Path | str) -> None:
diff --git a/CPAC/utils/configuration/configuration.py b/CPAC/utils/configuration/configuration.py
index 8444cce105..c4542f579d 100644
--- a/CPAC/utils/configuration/configuration.py
+++ b/CPAC/utils/configuration/configuration.py
@@ -50,7 +50,10 @@ class Configuration:
will form the base of the Configuration object with the values in
the given dictionary overriding matching keys in the base at any
depth. If no ``FROM`` key is included, the base Configuration is
- the default Configuration.
+ the blank preconfiguration.
+
+ .. versionchanged:: 1.8.5
+ From version 1.8.0 to version 1.8.5, unspecified keys were based on the default configuration rather than the blank preconfiguration.
``FROM`` accepts either the name of a preconfigured pipeline or a
path to a YAML file.
diff --git a/CPAC/utils/create_fsl_flame_preset.py b/CPAC/utils/create_fsl_flame_preset.py
index 856c10a3b4..848fe5e9fe 100644
--- a/CPAC/utils/create_fsl_flame_preset.py
+++ b/CPAC/utils/create_fsl_flame_preset.py
@@ -1,4 +1,4 @@
-# Copyright (C) 2018-2024 C-PAC Developers
+# Copyright (C) 2018-2025 C-PAC Developers
# This file is part of C-PAC.
@@ -1092,20 +1092,6 @@ def run(
import os
- import pandas as pd
- import pkg_resources as p
-
- # make life easy
- keys_csv = p.resource_filename("CPAC", "resources/cpac_outputs.csv")
- try:
- pd.read_csv(keys_csv)
- except Exception as e:
- err = (
- "\n[!] Could not access or read the cpac_outputs.csv "
- f"resource file:\n{keys_csv}\n\nError details {e}\n"
- )
- raise Exception(err)
-
if derivative_list == "all":
derivative_list = [
"alff",
diff --git a/CPAC/utils/datasource.py b/CPAC/utils/datasource.py
index 008e674c2d..25adb1eeca 100644
--- a/CPAC/utils/datasource.py
+++ b/CPAC/utils/datasource.py
@@ -1156,7 +1156,7 @@ def res_string_to_tuple(resolution):
return (float(resolution.replace("mm", "")),) * 3
-def resolve_resolution(resolution, template, template_name, tag=None):
+def resolve_resolution(orientation, resolution, template, template_name, tag=None):
"""Resample a template to a given resolution."""
from nipype.interfaces import afni
@@ -1203,6 +1203,7 @@ def resolve_resolution(resolution, template, template_name, tag=None):
resample.inputs.resample_mode = "Cu"
resample.inputs.in_file = local_path
resample.base_dir = "."
+ resample.inputs.orientation = orientation
resampled_template = resample.run()
local_path = resampled_template.outputs.out_file
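With `desired_orientation` now threaded into `resolve_resolution`, templates are reoriented by the same 3dresample call that resamples them. A rough sketch of the underlying Nipype call, with hypothetical paths and voxel size (the real function also handles S3-hosted templates and resolution-string substitution):

from nipype.interfaces import afni

resample = afni.Resample()
resample.inputs.in_file = "MNI152_T1_1mm.nii.gz"  # hypothetical local template
resample.inputs.voxel_size = (2.0, 2.0, 2.0)      # e.g. parsed from "2mm"
resample.inputs.orientation = "RPI"               # the new pipeline_setup desired_orientation
resample.inputs.resample_mode = "Cu"              # cubic interpolation
resample.inputs.outputtype = "NIFTI_GZ"
result = resample.run()  # result.outputs.out_file: reoriented, resampled template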
diff --git a/CPAC/utils/interfaces/netcorr.py b/CPAC/utils/interfaces/netcorr.py
index aee9a4d13d..6af44a15ab 100644
--- a/CPAC/utils/interfaces/netcorr.py
+++ b/CPAC/utils/interfaces/netcorr.py
@@ -19,6 +19,61 @@
class NetCorr(NipypeNetCorr): # noqa: D101
input_spec = NetCorrInputSpec
+ def _list_outputs(self):
+ """``nipype.interfaces.afni.preprocess.NetCorr._list_outputs`` with a bugfix.
+
+ Notes
+ -----
+ This method can be removed once nipy/nipype#3697 is merged and a release
+ including that PR is included in the C-PAC image.
+ """
+ # STATEMENT OF CHANGES:
+ # This function is derived from sources licensed under the Apache-2.0 terms,
+ # and this function has been changed.
+
+ # CHANGES:
+ # * Includes changes from https://github.com/nipy/nipype/pull/3697 prior to all commits between https://github.com/nipy/nipype/tree/1.8.6 and that PR being merged and released.
+
+ # ORIGINAL WORK'S ATTRIBUTION NOTICE:
+ # Copyright (c) 2009-2016, Nipype developers
+
+ # Licensed under the Apache License, Version 2.0 (the "License");
+ # you may not use this file except in compliance with the License.
+ # You may obtain a copy of the License at
+
+ # http://www.apache.org/licenses/LICENSE-2.0
+
+ # Unless required by applicable law or agreed to in writing, software
+ # distributed under the License is distributed on an "AS IS" BASIS,
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ # See the License for the specific language governing permissions and
+ # limitations under the License.
+
+ # Prior to release 0.12, Nipype was licensed under a BSD license.
+
+ # Modifications copyright (C) 2024 C-PAC Developers
+ import glob
+ import os
+
+ from nipype.interfaces.base.traits_extension import isdefined
+
+ outputs = self.output_spec().get()
+
+ if not isdefined(self.inputs.out_file):
+ prefix = self._gen_fname(self.inputs.in_file, suffix="_netcorr")
+ else:
+ prefix = self.inputs.out_file
+
+ # All outputs should be in the same directory as the prefix
+ odir = os.path.dirname(os.path.abspath(prefix))
+ outputs["out_corr_matrix"] = glob.glob(os.path.join(odir, "*.netcc"))[0]
+
+ if self.inputs.ts_wb_corr or self.inputs.ts_wb_Z:
+ corrdir = os.path.join(odir, prefix + "_000_INDIV")
+ outputs["out_corr_maps"] = glob.glob(os.path.join(corrdir, "*.nii.gz"))
+
+ return outputs
+
NetCorr.__doc__ = f"""{NipypeNetCorr.__doc__}
`CPAC.utils.interfaces.netcorr.NetCorr` adds an additional optional input, `automask_off`
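The patched `_list_outputs` matters when per-ROI whole-brain correlation maps are requested. A minimal usage sketch with hypothetical inputs (input names as in Nipype's `afni.NetCorr`, which this class subclasses):

from CPAC.utils.interfaces.netcorr import NetCorr

nc = NetCorr()
nc.inputs.in_file = "func_preproc.nii.gz"  # hypothetical BOLD series
nc.inputs.in_rois = "rois.nii.gz"          # hypothetical ROI atlas
nc.inputs.ts_wb_corr = True                # also write whole-brain correlation maps
result = nc.run()
# result.outputs.out_corr_matrix -> the *.netcc matrix
# result.outputs.out_corr_maps   -> per-ROI *.nii.gz maps located by the fixed glob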
diff --git a/CPAC/utils/io.py b/CPAC/utils/io.py
new file mode 100644
index 0000000000..12d7d7f5d1
--- /dev/null
+++ b/CPAC/utils/io.py
@@ -0,0 +1,37 @@
+# Copyright (C) 2012-2024 C-PAC Developers
+
+# This file is part of C-PAC.
+
+# C-PAC is free software: you can redistribute it and/or modify it under
+# the terms of the GNU Lesser General Public License as published by the
+# Free Software Foundation, either version 3 of the License, or (at your
+# option) any later version.
+
+# C-PAC is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
+# License for more details.
+
+# You should have received a copy of the GNU Lesser General Public
+# License along with C-PAC. If not, see <https://www.gnu.org/licenses/>.
+"""Utilities for inputs and outputs."""
+
+from pathlib import Path
+
+from yaml import safe_load, YAMLError
+
+
+def load_yaml(
+ path: Path | str, desc: str = "YAML file", encoding="utf8"
+) -> dict | list | str:
+ """Try to load a YAML file to a Python object."""
+ path = Path(path).absolute()
+ try:
+ with path.open("r", encoding=encoding) as _yaml:
+ result = safe_load(_yaml)
+ except FileNotFoundError as error:
+ raise error
+ except Exception as error:
+ msg = f"{desc} is not in proper YAML format. Please check {path}"
+ raise YAMLError(msg) from error
+ return result
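A quick usage sketch for the new helper (hypothetical path):

from CPAC.utils.io import load_yaml

pipeline_dict = load_yaml("/configs/my_pipeline.yml", desc="pipeline config")
# Raises FileNotFoundError if the path does not exist, or yaml.YAMLError naming
# the offending file if the contents are not valid YAML.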
diff --git a/CPAC/utils/monitoring/custom_logging.py b/CPAC/utils/monitoring/custom_logging.py
index abd6b63438..3d8d1b842a 100644
--- a/CPAC/utils/monitoring/custom_logging.py
+++ b/CPAC/utils/monitoring/custom_logging.py
@@ -21,6 +21,7 @@
import subprocess
from sys import exc_info as sys_exc_info
from traceback import print_exception
+from typing import Optional, Sequence
from nipype import logging as nipype_logging
@@ -59,7 +60,14 @@ def getLogger(name): # pylint: disable=invalid-name
if name in MOCK_LOGGERS:
return MOCK_LOGGERS[name]
logger = nipype_logging.getLogger(name)
- return logging.getLogger(name) if logger is None else logger
+ if logger is None:
+ logger = logging.getLogger(name)
+ if not logger.handlers:
+ handler = logging.StreamHandler()
+ handler.setFormatter(logging.Formatter("%(message)s"))
+ logger.setLevel(logging.INFO)
+ logger.addHandler(handler)
+ return logger
# Nipype built-in loggers
@@ -171,7 +179,9 @@ def _log(message, *items, exc_info=False):
logging, level.upper(), logging.NOTSET
):
with open(
- self.handlers[0].baseFilename, "a", encoding="utf-8"
+ MockLogger._get_first_file_handler(self.handlers).baseFilename,
+ "a",
+ encoding="utf-8",
) as log_file:
if exc_info and isinstance(message, Exception):
value, traceback = sys_exc_info()[1:]
@@ -190,6 +200,16 @@ def delete(self):
"""Delete the mock logger from memory."""
del MOCK_LOGGERS[self.name]
+ @staticmethod
+ def _get_first_file_handler(
+ handlers: Sequence[logging.Handler | MockHandler],
+ ) -> Optional[logging.FileHandler | MockHandler]:
+ """Given a list of Handlers, return the first FileHandler found or return None."""
+ for handler in handlers:
+ if isinstance(handler, (logging.FileHandler, MockHandler)):
+ return handler
+ return None
+
def _lazy_sub(message, *items):
"""Given lazy-logging syntax, return string with substitutions.
@@ -252,12 +272,12 @@ def set_up_logger(
Examples
--------
>>> lg = set_up_logger('test')
- >>> lg.handlers[0].baseFilename.split('/')[-1]
+ >>> MockLogger._get_first_file_handler(lg.handlers).baseFilename.split('/')[-1]
'test.log'
>>> lg.level
0
>>> lg = set_up_logger('second_test', 'specific_filename.custom', 'debug')
- >>> lg.handlers[0].baseFilename.split('/')[-1]
+ >>> MockLogger._get_first_file_handler(lg.handlers).baseFilename.split('/')[-1]
'specific_filename.custom'
>>> lg.level
10
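The `_get_first_file_handler` lookup and the `getLogger` fallback both follow standard `logging` patterns. Here is a self-contained sketch of the same idea using only the standard library (C-PAC's `MockHandler` is omitted); the logger name and log path are hypothetical:

```python
import logging
from typing import Optional, Sequence


def first_file_handler(
    handlers: Sequence[logging.Handler],
) -> Optional[logging.FileHandler]:
    """Return the first FileHandler in ``handlers``, or None if there is none."""
    for handler in handlers:
        if isinstance(handler, logging.FileHandler):
            return handler
    return None


lg = logging.getLogger("sketch")
lg.addHandler(logging.StreamHandler())            # not a FileHandler
lg.addHandler(logging.FileHandler("/tmp/sketch.log"))
fh = first_file_handler(lg.handlers)
assert fh is not None and fh.baseFilename.endswith("sketch.log")
```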
diff --git a/CPAC/utils/monitoring/draw_gantt_chart.py b/CPAC/utils/monitoring/draw_gantt_chart.py
index 089e9fdd39..67f26693f9 100644
--- a/CPAC/utils/monitoring/draw_gantt_chart.py
+++ b/CPAC/utils/monitoring/draw_gantt_chart.py
@@ -401,7 +401,7 @@ def generate_gantt_chart(
return
for node in nodes_list:
- if "duration" not in node:
+ if "duration" not in node and (node["start"] and node["finish"]):
node["duration"] = (node["finish"] - node["start"]).total_seconds()
# Create the header of the report with useful information
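The added guard skips nodes whose `start`/`finish` are empty, so the subtraction can no longer raise. A small sketch with made-up node dicts, assuming `start`/`finish` are `datetime` objects as in the runtime callback logs:

```python
from datetime import datetime

nodes_list = [
    {"start": datetime(2024, 1, 1, 12, 0, 0),
     "finish": datetime(2024, 1, 1, 12, 0, 30)},
    {"start": None, "finish": None},  # e.g. a node that never ran
]
for node in nodes_list:
    if "duration" not in node and (node["start"] and node["finish"]):
        node["duration"] = (node["finish"] - node["start"]).total_seconds()

assert nodes_list[0]["duration"] == 30.0
assert "duration" not in nodes_list[1]  # skipped instead of raising TypeError
```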
diff --git a/CPAC/utils/ndmg_utils.py b/CPAC/utils/ndmg_utils.py
index 0623118e75..1680e8edf6 100644
--- a/CPAC/utils/ndmg_utils.py
+++ b/CPAC/utils/ndmg_utils.py
@@ -32,7 +32,6 @@
# Modifications Copyright (C) 2022-2024 C-PAC Developers
# This file is part of C-PAC.
-from logging import basicConfig, INFO
import os
import numpy as np
@@ -41,7 +40,6 @@
from CPAC.utils.monitoring.custom_logging import getLogger
logger = getLogger("nuerodata.m2g.ndmg")
-basicConfig(format="%(message)s", level=INFO)
def ndmg_roi_timeseries(func_file, label_file):
diff --git a/CPAC/utils/outputs.py b/CPAC/utils/outputs.py
index 11b81eb60f..f148bba87d 100644
--- a/CPAC/utils/outputs.py
+++ b/CPAC/utils/outputs.py
@@ -1,13 +1,36 @@
+# Copyright (C) 2018-2025 C-PAC Developers
+
+# This file is part of C-PAC.
+
+# C-PAC is free software: you can redistribute it and/or modify it under
+# the terms of the GNU Lesser General Public License as published by the
+# Free Software Foundation, either version 3 of the License, or (at your
+# option) any later version.
+
+# C-PAC is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
+# License for more details.
+
+# You should have received a copy of the GNU Lesser General Public
+# License along with C-PAC. If not, see <https://www.gnu.org/licenses/>.
+"""Specify the resources that C-PAC writes to the output direcotry."""
+
+from importlib.resources import files
+from typing import ClassVar
+
import pandas as pd
-import pkg_resources as p
class Outputs:
- # Settle some things about the resource pool reference and the output directory
- reference_csv = p.resource_filename("CPAC", "resources/cpac_outputs.tsv")
+ """Settle some things about the resource pool reference and the output directory."""
+
+ reference_csv = str(files("CPAC").joinpath("resources/cpac_outputs.tsv"))
try:
- reference = pd.read_csv(reference_csv, delimiter="\t", keep_default_na=False)
+ reference: ClassVar[pd.DataFrame] = pd.read_csv(
+ reference_csv, delimiter="\t", keep_default_na=False
+ )
except Exception as e:
err = (
"\n[!] Could not access or read the cpac_outputs.tsv "
@@ -27,8 +50,12 @@ class Outputs:
reference[reference["4D Time Series"] == "Yes"]["Resource"]
)
- anat = list(reference[reference["Sub-Directory"] == "anat"]["Resource"])
- func = list(reference[reference["Sub-Directory"] == "func"]["Resource"])
+ anat: ClassVar[list[str]] = list(
+ reference[reference["Sub-Directory"] == "anat"]["Resource"]
+ )
+ func: ClassVar[list[str]] = list(
+ reference[reference["Sub-Directory"] == "func"]["Resource"]
+ )
# outputs to send into smoothing, if smoothing is enabled, and
# outputs to write out if the user selects to write non-smoothed outputs
@@ -45,6 +72,8 @@ class Outputs:
all_template_filter = _template_filter | _epitemplate_filter | _symtemplate_filter
all_native_filter = _T1w_native_filter | _bold_native_filter | _long_native_filter
+ bold_native: ClassVar[list[str]] = list(reference[_bold_native_filter]["Resource"])
+
native_nonsmooth = list(
reference[all_native_filter & _nonsmoothed_filter]["Resource"]
)
@@ -101,3 +130,11 @@ def _is_gifti(_file_key):
for gifti in giftis.itertuples()
if " " in gifti.File
}
+
+
+def group_derivatives(pull_func: bool = False) -> list[str]:
+ """Gather keys for anatomical and functional derivatives for group analysis."""
+ derivatives: list[str] = Outputs.func + Outputs.anat
+ if pull_func:
+ derivatives = derivatives + Outputs.bold_native
+ return derivatives
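A hypothetical usage sketch for the new `group_derivatives` helper, assuming this patch is installed:

```python
from CPAC.utils.outputs import group_derivatives, Outputs

keys = group_derivatives()                            # anat + func derivative keys
keys_with_bold = group_derivatives(pull_func=True)    # also native-space BOLD keys

assert set(Outputs.anat) <= set(keys)
assert set(keys) <= set(keys_with_bold)
```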
diff --git a/CPAC/utils/test_mocks.py b/CPAC/utils/test_mocks.py
index 336488f318..ea16c0be36 100644
--- a/CPAC/utils/test_mocks.py
+++ b/CPAC/utils/test_mocks.py
@@ -235,6 +235,7 @@ def configuration_strategy_mock(method="FSL"):
resampled_template.inputs.template = template
resampled_template.inputs.template_name = template_name
resampled_template.inputs.tag = tag
+ resampled_template.inputs.orientation = "RPI"
strat.update_resource_pool(
{template_name: (resampled_template, "resampled_template")}
diff --git a/CPAC/utils/tests/configs/__init__.py b/CPAC/utils/tests/configs/__init__.py
index f8a23bd4e6..896c79bf69 100644
--- a/CPAC/utils/tests/configs/__init__.py
+++ b/CPAC/utils/tests/configs/__init__.py
@@ -1,15 +1,21 @@
"""Configs for testing."""
-from pathlib import Path
+from importlib import resources
+
+try:
+ from importlib.resources.abc import Traversable
+except ModuleNotFoundError: # TODO: Remove this block once minimum Python version includes `importlib.resources.abc`
+ from importlib.abc import Traversable
-from pkg_resources import resource_filename
import yaml
-_TEST_CONFIGS_PATH = Path(resource_filename("CPAC", "utils/tests/configs"))
-with open(_TEST_CONFIGS_PATH / "neurostars_23786.yml", "r", encoding="utf-8") as _f:
+_TEST_CONFIGS_PATH: Traversable = resources.files("CPAC").joinpath(
+ "utils/tests/configs"
+)
+with (_TEST_CONFIGS_PATH / "neurostars_23786.yml").open("r", encoding="utf-8") as _f:
# A loaded YAML file to test https://tinyurl.com/neurostars23786
NEUROSTARS_23786 = _f.read()
-with open(_TEST_CONFIGS_PATH / "neurostars_24035.yml", "r", encoding="utf-8") as _f:
+with (_TEST_CONFIGS_PATH / "neurostars_24035.yml").open("r", encoding="utf-8") as _f:
# A loaded YAML file to test https://tinyurl.com/neurostars24035
NEUROSTARS_24035 = _f.read()
# A loaded YAML file to test https://tinyurl.com/cmicnlslack420349
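The `pkg_resources` → `importlib.resources` migration above follows the standard pattern; a minimal sketch of that pattern (requires C-PAC to be importable; the resource path is taken from the diff):

```python
from importlib import resources

cfg = resources.files("CPAC").joinpath("utils/tests/configs/neurostars_23786.yml")
with cfg.open("r", encoding="utf-8") as _f:
    NEUROSTARS_23786 = _f.read()
```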
diff --git a/CPAC/utils/tests/configs/github_2144.yml b/CPAC/utils/tests/configs/github_2144.yml
new file mode 100644
index 0000000000..a7d405c8ea
--- /dev/null
+++ b/CPAC/utils/tests/configs/github_2144.yml
@@ -0,0 +1,8 @@
+- site: site-1
+ subject_id: 01
+ unique_id: 02
+ derivatives_dir: /fprep/sub-0151
+- site: site-1
+ subject_id: !!str 02
+ unique_id: 02
+ derivatives_dir: /fprep/sub-0151
diff --git a/CPAC/utils/tests/test_bids_utils.py b/CPAC/utils/tests/test_bids_utils.py
index 57c0abef56..2b7267af94 100644
--- a/CPAC/utils/tests/test_bids_utils.py
+++ b/CPAC/utils/tests/test_bids_utils.py
@@ -16,7 +16,7 @@
# License along with C-PAC. If not, see <https://www.gnu.org/licenses/>.
"""Tests for bids_utils."""
-from logging import basicConfig, INFO
+from importlib import resources
import os
from subprocess import run
@@ -24,17 +24,18 @@
import yaml
from CPAC.utils.bids_utils import (
+ _check_value_type,
bids_gen_cpac_sublist,
cl_strip_brackets,
collect_bids_files_configs,
create_cpac_data_config,
load_cpac_data_config,
+ load_yaml_config,
sub_list_filter_by_labels,
)
from CPAC.utils.monitoring.custom_logging import getLogger
logger = getLogger("CPAC.utils.tests")
-basicConfig(format="%(message)s", level=INFO)
def create_sample_bids_structure(root_dir):
@@ -109,6 +110,19 @@ def test_gen_bids_sublist(bids_dir, test_yml, creds_path, dbg=False):
assert sublist
+def test_load_data_config_with_ints() -> None:
+ """Check that C-PAC coerces sub- and ses- ints to strings."""
+ data_config_file = resources.files("CPAC").joinpath(
+ "utils/tests/configs/github_2144.yml"
+ )
+ # make sure there are ints in the test data
+ assert _check_value_type(load_yaml_config(str(data_config_file), None))
+ # make sure there aren't ints when it's loaded through the loader
+ assert not _check_value_type(
+ load_cpac_data_config(str(data_config_file), None, None)
+ )
+
+
@pytest.mark.parametrize("t1w_label", ["acq-HCP", "acq-VNavNorm", "T1w", None])
@pytest.mark.parametrize(
"bold_label", ["task-peer_run-1", "[task-peer_run-1 task-peer_run-2]", "bold", None]
diff --git a/CPAC/utils/tests/test_symlinks.py b/CPAC/utils/tests/test_symlinks.py
index 570d2e9b74..a2ddca02c9 100644
--- a/CPAC/utils/tests/test_symlinks.py
+++ b/CPAC/utils/tests/test_symlinks.py
@@ -14,7 +14,6 @@
# You should have received a copy of the GNU Lesser General Public
# License along with C-PAC. If not, see <https://www.gnu.org/licenses/>.
-from logging import basicConfig, INFO
import os
import tempfile
@@ -24,7 +23,6 @@
from CPAC.utils.symlinks import create_symlinks
logger = getLogger("CPAC.utils.tests")
-basicConfig(format="%(message)s", level=INFO)
mocked_outputs = p.resource_filename(
"CPAC", os.path.join("utils", "tests", "test_symlinks-outputs.txt")
diff --git a/CPAC/utils/utils.py b/CPAC/utils/utils.py
index b459262993..69b96be4ca 100644
--- a/CPAC/utils/utils.py
+++ b/CPAC/utils/utils.py
@@ -2631,3 +2631,11 @@ def _replace_in_value_list(current_value, replacement_tuple):
for v in current_value
if bool(v) and v not in {"None", "Off", ""}
]
+
+
+def flip_orientation_code(code):
+ """
+    Reverse an orientation code by flipping R↔L, A↔P, and I↔S.
+ """
+ flip_dict = {"R": "L", "L": "R", "A": "P", "P": "A", "I": "S", "S": "I"}
+ return "".join(flip_dict[c] for c in code)
diff --git a/CPAC/vmhc/tests/test_vmhc.py b/CPAC/vmhc/tests/test_vmhc.py
index 2471a9b02c..e66d3cd782 100644
--- a/CPAC/vmhc/tests/test_vmhc.py
+++ b/CPAC/vmhc/tests/test_vmhc.py
@@ -14,7 +14,6 @@
# You should have received a copy of the GNU Lesser General Public
# License along with C-PAC. If not, see <https://www.gnu.org/licenses/>.
-from logging import basicConfig, INFO
import os
import pytest
@@ -25,7 +24,6 @@
from CPAC.vmhc.vmhc import vmhc as create_vmhc
logger = getLogger("CPAC.utils.tests")
-basicConfig(format="%(message)s", level=INFO)
@pytest.mark.skip(reason="test needs refactoring")
diff --git a/Dockerfile b/Dockerfile
index 838d8dcc4b..1debc54c4a 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -15,8 +15,8 @@
# You should have received a copy of the GNU Lesser General Public
# License along with C-PAC. If not, see <https://www.gnu.org/licenses/>.
FROM ghcr.io/fcp-indi/c-pac/stage-base:standard-v1.8.8.dev1
-LABEL org.opencontainers.image.description "Full C-PAC image"
-LABEL org.opencontainers.image.source https://github.com/FCP-INDI/C-PAC
+LABEL org.opencontainers.image.description="Full C-PAC image"
+LABEL org.opencontainers.image.source=https://github.com/FCP-INDI/C-PAC
USER root
# install C-PAC
@@ -45,7 +45,8 @@ RUN rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/* /root/.cache/* \
&& chmod 777 $(ls / | grep -v sys | grep -v proc)
ENV PYTHONUSERBASE=/home/c-pac_user/.local
ENV PATH=$PATH:/home/c-pac_user/.local/bin \
- PYTHONPATH=$PYTHONPATH:$PYTHONUSERBASE/lib/python3.10/site-packages
+ PYTHONPATH=$PYTHONPATH:$PYTHONUSERBASE/lib/python3.10/site-packages \
+ _SHELL=/bin/bash
# set user
WORKDIR /home/c-pac_user
diff --git a/README.md b/README.md
index 137bc57972..c320755101 100644
--- a/README.md
+++ b/README.md
@@ -9,7 +9,8 @@ C-PAC is distributed in the hope that it will be useful, but WITHOUT ANY WARRANT
You should have received a copy of the GNU Lesser General Public License along with C-PAC. If not, see <https://www.gnu.org/licenses/>. -->
C-PAC: Configurable Pipeline for the Analysis of Connectomes
============================================================
-[](https://doi.org/10.1101/2021.12.01.470790) [](https://doi.org/10.5281/zenodo.164638)
+[](https://doi.org/10.1101/2021.12.01.470790) [](https://doi.org/10.5281/zenodo.164638) [](https://results.pre-commit.ci/latest/github/FCP-INDI/C-PAC/main)
+
[](./COPYING.LESSER)
diff --git a/dev/circleci_data/conftest.py b/dev/circleci_data/conftest.py
new file mode 100644
index 0000000000..4966b986c5
--- /dev/null
+++ b/dev/circleci_data/conftest.py
@@ -0,0 +1,19 @@
+# Copyright (C) 2025 C-PAC Developers
+
+# This file is part of C-PAC.
+
+# C-PAC is free software: you can redistribute it and/or modify it under
+# the terms of the GNU Lesser General Public License as published by the
+# Free Software Foundation, either version 3 of the License, or (at your
+# option) any later version.
+
+# C-PAC is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
+# License for more details.
+
+# You should have received a copy of the GNU Lesser General Public
+# License along with C-PAC. If not, see <https://www.gnu.org/licenses/>.
+"""Global fixtures for C-PAC tests."""
+
+from CPAC._global_fixtures import * # noqa: F403
diff --git a/dev/circleci_data/data_settings_bids_examples_ds051_default_BIDS.yml b/dev/circleci_data/data_settings_bids_examples_ds051_default_BIDS.yml
index 5449692350..c196250ac8 100644
--- a/dev/circleci_data/data_settings_bids_examples_ds051_default_BIDS.yml
+++ b/dev/circleci_data/data_settings_bids_examples_ds051_default_BIDS.yml
@@ -15,7 +15,7 @@ dataFormat: BIDS
# BIDS Data Format only.
#
# This should be the path to the overarching directory containing the entire dataset.
-bidsBaseDir: ./bids-examples/ds051
+bidsBaseDir: ./ds051
# File Path Template for Anatomical Files
@@ -49,7 +49,7 @@ awsCredentialsFile: None
# Directory where CPAC should place data configuration files.
-outputSubjectListLocation: ./dev/circleci_data
+outputSubjectListLocation: /code/dev/circleci_data
# A label to be appended to the generated participant list files.
diff --git a/dev/circleci_data/test_external_utils.py b/dev/circleci_data/test_external_utils.py
index f516b0c903..c55e264c8b 100644
--- a/dev/circleci_data/test_external_utils.py
+++ b/dev/circleci_data/test_external_utils.py
@@ -31,8 +31,6 @@
from CPAC.__main__ import utils as CPAC_main_utils # noqa: E402
-# pylint: disable=wrong-import-position
-
def _click_backport(command, key):
"""Switch back to underscores for older versions of click."""
@@ -93,18 +91,11 @@ def test_build_data_config(caplog, cli_runner, multiword_connector):
_delete_test_yaml(test_yaml)
-def test_new_settings_template(caplog, cli_runner):
+def test_new_settings_template(bids_examples: Path, caplog, cli_runner):
"""Test CLI ``utils new-settings-template``."""
caplog.set_level(INFO)
- os.chdir(CPAC_DIR)
-
- example_dir = os.path.join(CPAC_DIR, "bids-examples")
- if not os.path.exists(example_dir):
- from git import Repo
-
- Repo.clone_from(
- "https://github.com/bids-standard/bids-examples.git", example_dir
- )
+ assert bids_examples.exists()
+ os.chdir(bids_examples)
result = cli_runner.invoke(
CPAC_main_utils.commands[
diff --git a/dev/circleci_data/test_in_image.sh b/dev/circleci_data/test_in_image.sh
index b62de84994..d03b6e8015 100755
--- a/dev/circleci_data/test_in_image.sh
+++ b/dev/circleci_data/test_in_image.sh
@@ -1,5 +1,8 @@
export PATH=$PATH:/home/$(whoami)/.local/bin
+# don't force SSH for git clones in testing image
+git config --global --unset url.ssh://git@github.com.insteadof
+
# install testing requirements
pip install -r /code/dev/circleci_data/requirements.txt
diff --git a/requirements.txt b/requirements.txt
index 58afacfa6d..bb9f9b6c73 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -40,12 +40,12 @@ voluptuous==0.13.1
# the below are pinned specifically to match what the FSL installer installs
botocore==1.31.4
charset-normalizer==3.1.0
-cryptography==42.0.3
+cryptography==44.0.1
h5py==3.8.0
importlib-metadata==6.8.0
lxml==4.9.2
pip==23.3
setuptools==70.0.0
-urllib3==1.26.18
+urllib3==1.26.19
wheel==0.40.0
zipp==3.19.1
diff --git a/setup.py b/setup.py
index 17919395d2..f22a744e2d 100755
--- a/setup.py
+++ b/setup.py
@@ -1,4 +1,4 @@
-# Copyright (C) 2022-2024 C-PAC Developers
+# Copyright (C) 2022-2025 C-PAC Developers
# This file is part of C-PAC.
@@ -84,7 +84,12 @@ def main(**extra_args):
extras_require={"graphviz": ["pygraphviz"]},
configuration=configuration,
scripts=glob("scripts/*"),
- entry_points={"console_scripts": ["cpac = CPAC.__main__:main"]},
+ entry_points={
+ "console_scripts": [
+ "cpac = CPAC.__main__:main",
+ "resource_inventory = CPAC.pipeline.resource_inventory:main",
+ ]
+ },
package_data={
"CPAC": [
"test_data/*",
diff --git a/variant-lite.Dockerfile b/variant-lite.Dockerfile
index b58801b519..20561f09aa 100644
--- a/variant-lite.Dockerfile
+++ b/variant-lite.Dockerfile
@@ -15,8 +15,8 @@
# You should have received a copy of the GNU Lesser General Public
# License along with C-PAC. If not, see <https://www.gnu.org/licenses/>.
FROM ghcr.io/fcp-indi/c-pac/stage-base:lite-v1.8.8.dev1
-LABEL org.opencontainers.image.description "Full C-PAC image without FreeSurfer"
-LABEL org.opencontainers.image.source https://github.com/FCP-INDI/C-PAC
+LABEL org.opencontainers.image.description="Full C-PAC image without FreeSurfer"
+LABEL org.opencontainers.image.source=https://github.com/FCP-INDI/C-PAC
USER root
# install C-PAC
@@ -46,7 +46,8 @@ RUN rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/* /root/.cache/* \
&& chmod 777 $(ls / | grep -v sys | grep -v proc)
ENV PYTHONUSERBASE=/home/c-pac_user/.local
ENV PATH=$PATH:/home/c-pac_user/.local/bin \
- PYTHONPATH=$PYTHONPATH:$PYTHONUSERBASE/lib/python3.10/site-packages
+ PYTHONPATH=$PYTHONPATH:$PYTHONUSERBASE/lib/python3.10/site-packages \
+ _SHELL=/bin/bash
# set user
WORKDIR /home/c-pac_user